Skip to content

Commit a4e3787

Browse files
olichek1o0 and github-actions
authored
Release 1.9.0 (#776)
* Generic object added to Dataset model * Migration file; nullable fields * force change to filename_pattern field (previous migration failed to apply correctly) * Fix migrations in tests * added subjectTrials fixture * Fix REST tests * timezone.utc -> datetime.utcnow * Fix test * bump version number * GitHub Actions generated requirements_frozen.txt * rephrase password differences explicitly * GitHub Actions generated requirements_frozen.txt --------- Co-authored-by: Miles Wells <k1o0@5tk.co> Co-authored-by: github-actions <github-actions@github.com>
1 parent 661e137 commit a4e3787

13 files changed

+203
-46
lines changed

alyx/alyx/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
VERSION = __version__ = '1.8.0'
1+
VERSION = __version__ = '1.9.0'

alyx/data/fixtures/data.datasettype.json

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1747,5 +1747,16 @@
17471747
"description": "A list of acquisition devices and behaviour protocols, along with the data and sync file location. The root keys are (devices, sync, task, procedures).",
17481748
"filename_pattern": "_*_experiment.description.*"
17491749
}
1750+
},
1751+
{
1752+
"model": "data.datasettype",
1753+
"pk": "e0614a49-8f18-46b1-a4d9-0710a080fd8d",
1754+
"fields": {
1755+
"json": null,
1756+
"name": "subjectTrials.table",
1757+
"created_by": null,
1758+
"description": "All trials data for a given subject, contains the same columns as trials.table, plus \"session\", \"session_start_time\" and \"session_number\"",
1759+
"filename_pattern": ""
1760+
}
17501761
}
17511762
]
Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
# Generated by Django 4.1.5 on 2023-01-31 12:34
2+
3+
from django.db import migrations, transaction, models
4+
import django.db.models.deletion
5+
6+
7+
def forwards(apps, _):
8+
"""Go through the datasets and assign the session field to the content_object field"""
9+
Dataset = apps.get_model('data', 'Dataset')
10+
with transaction.atomic():
11+
for dataset in Dataset.objects.filter(session__isnull=False).iterator():
12+
if dataset.content_object is None:
13+
dataset.content_object = dataset.session
14+
dataset.save()
15+
16+
17+
def backwards(apps, _):
18+
Dataset = apps.get_model('data', 'Dataset')
19+
with transaction.atomic():
20+
for dataset in Dataset.objects.filter(session__isnull=False).iterator():
21+
if dataset.content_object is not None:
22+
dataset.content_object = None
23+
dataset.save()
24+
25+
26+
class Migration(migrations.Migration):
27+
28+
dependencies = [
29+
('contenttypes', '0002_remove_content_type_name'),
30+
('data', '0012_alter_datasettype_filename_pattern_and_more'),
31+
]
32+
33+
operations = [
34+
migrations.AddField(
35+
model_name='dataset',
36+
name='content_type',
37+
field=models.ForeignKey(null=True, blank=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype'),
38+
preserve_default=False,
39+
),
40+
migrations.AddField(
41+
model_name='dataset',
42+
name='object_id',
43+
field=models.UUIDField(
44+
null=True, blank=True, help_text='UUID, an object of content_type with this ID must already exist to attach a note.'),
45+
preserve_default=False,
46+
),
47+
# migrations.RunPython(forwards, backwards)
48+
]
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
# Generated by Django 4.1.5 on 2023-02-01 15:32
2+
# NB: The previous migrations (0011 and 0012) somehow failed to set the filename_pattern field as
3+
# nullable. Migrations 0014 and 0015 reverse and remake this change which apparently fixed this
4+
# issue.
5+
6+
import alyx.base
7+
from django.db import migrations, transaction
8+
9+
PATTERN = '$$$'
10+
11+
12+
def fix_null_fields(apps, _):
13+
"""Populate null filename_pattern fields before making column not null"""
14+
DatasetType = apps.get_model('data', 'DatasetType')
15+
assert not DatasetType.objects.filter(filename_pattern__startswith=PATTERN).count()
16+
with transaction.atomic():
17+
for dtype in DatasetType.objects.filter(filename_pattern__isnull=True).iterator():
18+
dtype.filename_pattern = PATTERN + dtype.name
19+
dtype.save()
20+
21+
22+
def null_fields(apps, _):
23+
"""Reset previously null filename_pattern fields"""
24+
DatasetType = apps.get_model('data', 'DatasetType')
25+
with transaction.atomic():
26+
for dtype in DatasetType.objects.filter(filename_pattern__startswith=PATTERN).iterator():
27+
dtype.filename_pattern = None
28+
dtype.save()
29+
30+
31+
class Migration(migrations.Migration):
32+
33+
dependencies = [
34+
('data', '0013_dataset_content_type_dataset_object_id'),
35+
]
36+
37+
operations = [
38+
migrations.RunPython(fix_null_fields, null_fields),
39+
migrations.AlterField(
40+
model_name='datasettype',
41+
name='filename_pattern',
42+
field=alyx.base.CharNullField(blank=True, help_text="File name pattern (with wildcards) for this file in ALF naming convention. E.g. 'spikes.times.*' or '*.timestamps.*', or 'spikes.*.*' for a DataCollection, which would include all files starting with the word 'spikes'. NB: Case-insensitive matching.If null, the name field must match the object.attribute part of the filename.", max_length=255, unique=True),
43+
),
44+
]
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
# Generated by Django 4.1.5 on 2023-02-01 15:33
2+
# NB: The previous migrations (0011 and 0012) somehow failed to set the filename_pattern field as
3+
# nullable. Migrations 0014 and 0015 reverse and remake this change which apparently fixed this
4+
# issue.
5+
6+
import alyx.base
7+
from django.db import migrations, transaction
8+
9+
PATTERN = '$$$'
10+
11+
12+
def fix_null_fields(apps, _):
13+
"""Populate null filename_pattern fields before making column not null"""
14+
DatasetType = apps.get_model('data', 'DatasetType')
15+
assert not DatasetType.objects.filter(filename_pattern__startswith=PATTERN).count()
16+
with transaction.atomic():
17+
for dtype in DatasetType.objects.filter(filename_pattern__isnull=True).iterator():
18+
dtype.filename_pattern = PATTERN + dtype.name
19+
dtype.save()
20+
21+
22+
def null_fields(apps, _):
23+
"""Reset previously null filename_pattern fields"""
24+
DatasetType = apps.get_model('data', 'DatasetType')
25+
with transaction.atomic():
26+
for dtype in DatasetType.objects.filter(filename_pattern__startswith=PATTERN).iterator():
27+
dtype.filename_pattern = None
28+
dtype.save()
29+
30+
31+
class Migration(migrations.Migration):
32+
33+
dependencies = [
34+
('data', '0014_alter_datasettype_filename_pattern'),
35+
]
36+
37+
operations = [
38+
migrations.AlterField(
39+
model_name='datasettype',
40+
name='filename_pattern',
41+
field=alyx.base.CharNullField(blank=True, help_text="File name pattern (with wildcards) for this file in ALF naming convention. E.g. 'spikes.times.*' or '*.timestamps.*', or 'spikes.*.*' for a DataCollection, which would include all files starting with the word 'spikes'. NB: Case-insensitive matching.If null, the name field must match the object.attribute part of the filename.", max_length=255, null=True, unique=True),
42+
),
43+
migrations.RunPython(null_fields, fix_null_fields),
44+
]

alyx/data/models.py

Lines changed: 13 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@
33
from django.core.validators import RegexValidator
44
from django.db import models
55
from django.utils import timezone
6+
from django.contrib.contenttypes.fields import GenericForeignKey
7+
from django.contrib.contenttypes.models import ContentType
68

79
from alyx.settings import TIME_ZONE, AUTH_USER_MODEL
810
from actions.models import Session
@@ -294,6 +296,13 @@ class Dataset(BaseExperimentalData):
294296
"""
295297
objects = DatasetManager()
296298

299+
# Generic foreign key to arbitrary model instances allows polymorphic relationships
300+
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, null=True, blank=True)
301+
object_id = models.UUIDField(help_text="UUID, an object of content_type with this "
302+
"ID must already exist to attach a note.",
303+
null=True, blank=True)
304+
content_object = GenericForeignKey()
305+
297306
file_size = models.BigIntegerField(blank=True, null=True, help_text="Size in bytes")
298307

299308
md5 = models.UUIDField(blank=True, null=True,
@@ -334,31 +343,19 @@ class Dataset(BaseExperimentalData):
334343
@property
335344
def is_online(self):
336345
fr = self.file_records.filter(data_repository__globus_is_personal=False)
337-
if fr:
338-
return all(fr.values_list('exists', flat=True))
339-
else:
340-
return False
346+
return bool(fr.count() and all(fr.values_list('exists', flat=True)))
341347

342348
@property
343349
def is_protected(self):
344-
tags = self.tags.filter(protected=True)
345-
if tags.count() > 0:
346-
return True
347-
else:
348-
return False
350+
return bool(self.tags.filter(protected=True).count())
349351

350352
@property
351353
def is_public(self):
352-
tags = self.tags.filter(public=True)
353-
if tags.count() > 0:
354-
return True
355-
else:
356-
return False
354+
return bool(self.tags.filter(public=True).count())
357355

358356
@property
359357
def data_url(self):
360-
records = self.file_records.filter(data_repository__data_url__isnull=False,
361-
exists=True)
358+
records = self.file_records.filter(data_repository__data_url__isnull=False, exists=True)
362359
# returns preferentially globus non-personal endpoint
363360
if records:
364361
order_keys = ('data_repository__globus_is_personal', '-data_repository__name')

alyx/data/tests.py

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,26 @@
44
from django.db.models import ProtectedError
55

66
from data.models import Dataset, DatasetType, Tag
7+
from subjects.models import Subject
8+
from misc.models import Lab
79
from data.transfers import get_dataset_type
810

911

1012
class TestModel(TestCase):
1113
def test_model_methods(self):
1214
(dset, _) = Dataset.objects.get_or_create(name='toto.npy')
1315

14-
assert dset.is_online is False
15-
assert dset.is_public is False
16-
assert dset.is_protected is False
16+
self.assertIs(dset.is_online, False)
17+
self.assertIs(dset.is_public, False)
18+
self.assertIs(dset.is_protected, False)
19+
20+
def test_generic_foreign_key(self):
21+
# Attempt to associate a dataset with a subject
22+
self.lab = Lab.objects.create(name='test_lab')
23+
subj = Subject.objects.create(nickname='foo', birth_date='2018-09-01', lab=self.lab)
24+
dset = Dataset(name='toto.npy', content_object=subj)
25+
26+
self.assertIs(dset.content_object, subj)
1727

1828
def test_delete(self):
1929
(dset, _) = Dataset.objects.get_or_create(name='foo.npy')

alyx/data/tests_rest.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import datetime
2-
import os.path as op
2+
from pathlib import PurePosixPath
33
import uuid
44

55
from django.contrib.auth import get_user_model
@@ -372,11 +372,11 @@ def _assert_registration(self, r, data):
372372

373373
self.assertEqual(d0['file_records'][0]['data_repository'], 'dr')
374374
self.assertEqual(d0['file_records'][0]['relative_path'],
375-
op.join(data['path'], 'a.b.e1'))
375+
PurePosixPath(data['path'], 'a.b.e1').as_posix())
376376

377377
self.assertEqual(d1['file_records'][0]['data_repository'], 'dr')
378378
self.assertEqual(d1['file_records'][0]['relative_path'],
379-
op.join(data['path'], 'a.c.e2'))
379+
PurePosixPath(data['path'], 'a.c.e2').as_posix())
380380

381381
def test_register_existence_options(self):
382382

@@ -509,8 +509,8 @@ def test_register_with_revision(self):
509509
self.assertTrue(not r['revision'])
510510
self.assertEqual(r['collection'], 'dir')
511511
# Check the revision relative path doesn't exist
512-
self.assertTrue(r['file_records'][0]['relative_path'] ==
513-
op.join(data['path'], data['filenames']))
512+
self.assertEqual(r['file_records'][0]['relative_path'],
513+
PurePosixPath(data['path'], data['filenames']).as_posix())
514514

515515
# Now test specifying a revision in path
516516
data = {'path': '%s/2018-01-01/002/dir/#v1#' % self.subject,
@@ -523,7 +523,7 @@ def test_register_with_revision(self):
523523
self.assertTrue(r['revision'] == 'v1')
524524
self.assertEqual('dir', r['collection'])
525525
# Check file record relative path includes revision
526-
self.assertTrue('#v1#' in r['file_records'][0]['relative_path'])
526+
self.assertIn('#v1#', r['file_records'][0]['relative_path'])
527527

528528
# Now test specifying a collection and a revision in filename
529529
data = {'path': '%s/2018-01-01/002/dir' % self.subject,
@@ -535,7 +535,7 @@ def test_register_with_revision(self):
535535
self.assertTrue(r['revision'] == 'v1')
536536
self.assertTrue(r['collection'] == 'dir/dir1')
537537
# Check file record relative path includes revision
538-
self.assertTrue('#v1#' in r['file_records'][0]['relative_path'])
538+
self.assertIn('#v1#', r['file_records'][0]['relative_path'])
539539

540540
# Test that giving nested revision folders gives out an error
541541
data = {'path': '%s/2018-01-01/002/dir' % self.subject,

alyx/data/transfers.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import os.path as op
55
import re
66
import time
7-
from pathlib import Path
7+
from pathlib import Path, PurePosixPath
88

99
from django.db.models import Case, When, Count, Q, F
1010
import globus_sdk
@@ -88,7 +88,7 @@ def _get_absolute_path(file_record):
8888
path2 = path2[6:]
8989
if path2.startswith('/'):
9090
path2 = path2[1:]
91-
path = op.join(path1, path2)
91+
path = PurePosixPath(path1, path2).as_posix()
9292
return path
9393

9494

@@ -248,7 +248,7 @@ def _create_dataset_file_records(
248248

249249
assert session is not None
250250
revision_name = f'#{revision.name}#' if revision else ''
251-
relative_path = op.join(rel_dir_path, collection or '', revision_name, filename)
251+
relative_path = PurePosixPath(rel_dir_path, collection or '', revision_name, filename)
252252
dataset_type = get_dataset_type(filename, DatasetType.objects.all())
253253
data_format = get_data_format(filename)
254254
assert dataset_type
@@ -261,8 +261,9 @@ def _create_dataset_file_records(
261261

262262
# Get or create the dataset.
263263
dataset, is_new = Dataset.objects.get_or_create(
264-
collection=collection, name=filename, session=session,
265-
dataset_type=dataset_type, data_format=data_format, revision=revision)
264+
collection=collection, name=filename, session=session, # content_object=session,
265+
dataset_type=dataset_type, data_format=data_format, revision=revision
266+
)
266267
dataset.default_dataset = default is True
267268
dataset.save()
268269

@@ -303,7 +304,7 @@ def _create_dataset_file_records(
303304
exists = repo in exists_in
304305
# Do not create a new file record if it already exists.
305306
fr, is_new = FileRecord.objects.get_or_create(
306-
dataset=dataset, data_repository=repo, relative_path=relative_path)
307+
dataset=dataset, data_repository=repo, relative_path=relative_path.as_posix())
307308
if is_new or is_patched:
308309
fr.exists = exists
309310
fr.json = None # this is important if a dataset is patched during an ongoing transfer

alyx/subjects/models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -263,7 +263,7 @@ def nicknamesafe(self):
263263
def age_days(self):
264264
if (self.death_date is None and self.birth_date is not None):
265265
# subject still alive
266-
age = datetime.now(timezone.utc).date() - self.birth_date
266+
age = datetime.utcnow().date() - self.birth_date
267267
elif (self.death_date is not None and self.birth_date is not None):
268268
# subject is dead
269269
age = self.death_date - self.birth_date

0 commit comments

Comments
 (0)