"""Unit tests for the one.alf.io module."""importloggingimportunittestimportunittest.mockimporttempfilefrompathlibimportPathimportshutilimportjsonimportuuidimportyamlimportnumpyasnpimportnumpy.testingimportpandasaspdfromiblutil.ioimportjsonableimportone.alf.ioasalfiofromone.alf.exceptionsimportALFObjectNotFoundfromone.alf.specimportFILE_SPEC,regexfromone.alf.pathimportALFPathtry:importsparseSKIP_SPARSE=FalseexceptModuleNotFoundError:SKIP_SPARSE=True
def test_to_dataframe_vectors(self):
    """Test AlfBunch.to_df with 1D and 2D vector attributes."""
    vectors = alfio.AlfBunch({
        'titi': np.random.rand(500, 1),
        'toto': np.random.rand(500),
        'tata': np.random.rand(500, 12),
    })
    df = vectors.to_df()
    # Single-column 2D arrays are squeezed into plain columns
    self.assertTrue(np.all(df['titi'].values == vectors.titi[:, 0]))
    self.assertTrue(np.all(df['toto'].values == vectors.toto))
    # Wide arrays are expanded into suffixed columns
    self.assertTrue(np.all(df['tata_0'].values == vectors.tata[:, 0]))
    self.assertTrue(np.all(df['tata_1'].values == vectors.tata[:, 1]))
    self.assertTrue(len(df.columns) == 12)
    self.assertEqual(
        10, len(df.filter(regex=r'tata_\d+', axis=1).columns),
        'failed to truncate columns')
def test_from_dataframe(self):
    """Tests for AlfBunch.from_df method."""
    cols = ['foo_0', 'foo_1', 'bar_0', 'bar_1', 'baz']
    df = pd.DataFrame(np.random.rand(10, 5), columns=cols)
    a = alfio.AlfBunch.from_df(df)
    self.assertIsInstance(a, alfio.AlfBunch)
    # Suffixed columns should be folded back into single 2D attributes
    self.assertCountEqual(['foo', 'bar', 'baz'], a.keys())
    numpy.testing.assert_array_equal(df['foo_0'], a['foo'][:, 0])
def test_append_numpy(self):
    """Test AlfBunch.append with numpy array values."""
    a = alfio.AlfBunch({'titi': np.random.rand(500), 'toto': np.random.rand(500)})
    b = alfio.AlfBunch({})
    # test with empty elements
    self.assertTrue(np.all(np.equal(a.append({})['titi'], a['titi'])))
    self.assertTrue(np.all(np.equal(b.append(a)['titi'], a['titi'])))
    self.assertEqual(b.append({}), {})
    # test with numpy arrays
    b = alfio.AlfBunch({'titi': np.random.rand(250), 'toto': np.random.rand(250)})
    c = a.append(b)
    t = np.all(np.equal(c['titi'][0:500], a['titi']))
    t &= np.all(np.equal(c['toto'][0:500], a['toto']))
    t &= np.all(np.equal(c['titi'][500:], b['titi']))
    t &= np.all(np.equal(c['toto'][500:], b['toto']))
    self.assertTrue(t)
    # in-place append should mutate `a` to equal the concatenation `c`
    a.append(b, inplace=True)
    self.assertTrue(np.all(np.equal(c['toto'], a['toto'])))
    self.assertTrue(np.all(np.equal(c['titi'], a['titi'])))
    # test warning thrown when uneven append occurs
    with self.assertLogs('one.alf.io', logging.WARNING):
        a.append({'titi': np.random.rand(10), 'toto': np.random.rand(4)})
def test_append_list(self):
    """Test AlfBunch.append with list values."""
    # test with lists
    a = alfio.AlfBunch({'titi': [0, 1, 3], 'toto': ['a', 'b', 'c']})
    b = alfio.AlfBunch({'titi': [1, 2, 4], 'toto': ['d', 'e', 'f']})
    c = a.append(b)
    self.assertTrue(len(c['toto']) == 6)
    self.assertTrue(len(a['toto']) == 3)
    c = c.append(b)
    self.assertTrue(len(c['toto']) == 9)
    self.assertTrue(len(a['toto']) == 3)
    c.append(b, inplace=True)
    self.assertTrue(len(c['toto']) == 12)
    self.assertTrue(len(a['toto']) == 3)
    # appending a bunch with mismatched keys is not supported
    with self.assertRaises(NotImplementedError):
        a.append(alfio.AlfBunch({'foobar': [8, 9, 10]}))
    # appending onto a non-list (str) value should log a warning
    a['foobar'] = '123'
    with self.assertLogs(logging.getLogger('one.alf.io'), logging.WARNING) as log:
        a.append({'titi': [5], 'toto': [8], 'foobar': 'd'})
    self.assertTrue('str' in log.output[0])
def test_npy_parts_and_file_filters(self):
    """Test save/load of objects with extra ALF parts, plus attribute filtering."""
    a = {'riri': np.random.rand(100), 'fifi': np.random.rand(100)}
    alfio.save_object_npy(self.tmpdir, a, 'neuveux', parts='tutu')
    alfio.save_object_npy(self.tmpdir, a, 'neuveux', parts='tutu', timescale='toto')
    self.assertTrue(alfio.exists(self.tmpdir, 'neuveux'))
    b = alfio.load_object(self.tmpdir, 'neuveux', short_keys=True)
    # Should include timescale in keys
    self.assertCountEqual(list(b.keys()), ['fifi', 'fifi_toto', 'riri', 'riri_toto'])
    for k in a:
        self.assertTrue(np.all(a[k] == b[k]))
    # Test load with extra filter
    b = alfio.load_object(self.tmpdir, 'neuveux', timescale='toto', short_keys=True)
    self.assertCountEqual(list(b.keys()), ['fifi_toto', 'riri_toto'])
    with self.assertRaises(ALFObjectNotFound):
        alfio.load_object(self.tmpdir, 'neuveux', timescale='toto', namespace='baz')
    # also test file filters through wildcard
    self.assertTrue(alfio.exists(self.tmpdir, 'neu*'))
    c = alfio.load_object(self.tmpdir, 'neuveux', timescale='to*', short_keys=True)
    self.assertEqual(set(c.keys()), set([k for k in c.keys() if k.endswith('toto')]))
    # test with the long keys
    b = alfio.load_object(self.tmpdir, 'neuveux', short_keys=False)
    expected = ['fifi.tutu', 'fifi_toto.tutu', 'riri.tutu', 'riri_toto.tutu']
    self.assertCountEqual(list(b.keys()), expected)
    # Test duplicate attributes
    alfio.save_object_npy(self.tmpdir, a, 'neuveux', parts=['tutu', 'titi'])
    with self.assertRaises(AssertionError):
        alfio.load_object(self.tmpdir, 'neuveux', short_keys=True)
    # Restricting by extra parts and using long keys should succeed
    alfio.load_object(self.tmpdir, 'neuveux', extra=['tutu', 'titi'])
    alfio.load_object(self.tmpdir, 'neuveux', short_keys=False)
def test_filter_by(self):
    """Test for one.alf.io.filter_by."""
    spec_idx_map = regex(FILE_SPEC).groupindex
    file_names = [
        'noalf.file',
        '_ibl_trials.intervals.npy',
        '_ibl_trials.intervals_bpod.csv',
        'wheel.position.npy',
        'wheel.timestamps.npy',
        'wheelMoves.intervals.npy',
        '_namespace_obj.attr_timescale.raw.v12.ext',
    ]
    for f in file_names:
        (self.tmpdir / f).touch()
    # Test filter with None; should return files with no non-standard timescale
    alf_files, _ = alfio.filter_by(self.tmpdir, timescale=None)
    expected = ['wheel.position.npy', 'wheel.timestamps.npy',
                'wheelMoves.intervals.npy', '_ibl_trials.intervals.npy']
    self.assertTrue(all(isinstance(x, ALFPath) for x in alf_files))
    self.assertCountEqual(alf_files, map(ALFPath, expected),
                          'failed to filter with None attribute')
    # Test filtering by object; should return only 'wheel' ALF objects
    alf_files, parts = alfio.filter_by(self.tmpdir, object='wheel')
    expected = ['wheel.position.npy', 'wheel.timestamps.npy']
    self.assertCountEqual(alf_files, map(ALFPath, expected), 'failed to filter by object')
    self.assertEqual(len(alf_files), len(parts))
    # Test wildcards; should return 'wheel' and 'wheelMoves' ALF objects
    alf_files, _ = alfio.filter_by(self.tmpdir, object='wh*')
    expected = ['wheel.position.npy', 'wheel.timestamps.npy', 'wheelMoves.intervals.npy']
    self.assertCountEqual(alf_files, map(ALFPath, expected), 'failed to filter with wildcard')
    # Test wildcard arrays
    alf_files, _ = alfio.filter_by(self.tmpdir, object='wh*', attribute=['time*', 'pos*'])
    expected = ['wheel.position.npy', 'wheel.timestamps.npy']
    self.assertCountEqual(alf_files, map(ALFPath, expected), 'failed to filter with wildcard')
    # Test filtering by specific timescale; test parts returned
    alf_files, parts = alfio.filter_by(self.tmpdir, timescale='bpod')
    expected = [ALFPath('_ibl_trials.intervals_bpod.csv')]
    self.assertEqual(alf_files, expected, 'failed to filter by timescale')
    expected = ('ibl', 'trials', 'intervals', 'bpod', None, 'csv')
    self.assertTupleEqual(parts[0], expected)
    self.assertEqual(len(parts[0]), len(spec_idx_map))
    # groupindex is 1-based while the parts tuple is 0-based, hence the -1
    self.assertEqual(parts[0][spec_idx_map['timescale'] - 1], 'bpod')
    # Test filtering multiple attributes; should return only trials intervals
    alf_files, _ = alfio.filter_by(self.tmpdir, attribute='intervals', object='trials')
    expected = ['_ibl_trials.intervals.npy', '_ibl_trials.intervals_bpod.csv']
    self.assertCountEqual(alf_files, map(ALFPath, expected),
                          'failed to filter by multiple attribute')
    # Test returning only ALF files
    alf_files, _ = alfio.filter_by(self.tmpdir)
    self.assertCountEqual(alf_files, map(ALFPath, file_names[1:]),
                          'failed to return ALF files')
    # Test return empty
    out = alfio.filter_by(self.tmpdir, object=None)
    self.assertEqual(out, ([], []))
    # Test extras
    alf_files, _ = alfio.filter_by(self.tmpdir, extra='v12')
    expected = [ALFPath('_namespace_obj.attr_timescale.raw.v12.ext')]
    self.assertEqual(alf_files, expected, 'failed to filter extra attributes')
    alf_files, _ = alfio.filter_by(self.tmpdir, extra=['v12', 'raw'])
    expected = [ALFPath('_namespace_obj.attr_timescale.raw.v12.ext')]
    self.assertEqual(alf_files, expected, 'failed to filter extra attributes as list')
    alf_files, _ = alfio.filter_by(self.tmpdir, extra=['foo', 'v12'])
    self.assertEqual(alf_files, [], 'failed to filter extra attributes')
    # Assert kwarg validation; should raise TypeError
    with self.assertRaises(TypeError):
        alfio.filter_by(self.tmpdir, unknown=None)
    # Check regular expression search
    alf_files, _ = alfio.filter_by(self.tmpdir, object='^wheel.*', wildcards=False)
    expected = ['wheel.position.npy', 'wheel.timestamps.npy', 'wheelMoves.intervals.npy']
    self.assertCountEqual(alf_files, map(ALFPath, expected), 'failed to filter by regex')
    # Should work with lists
    alf_files, _ = alfio.filter_by(self.tmpdir, object=['^wheel$', '.*Moves'], wildcards=False)
    self.assertCountEqual(alf_files, map(ALFPath, expected), 'failed to filter by regex')
def setUp(self) -> None:
    """Create a temp directory populated with ALF npy/pqt fixture files."""
    # riri, fifi and loulou are huey, duey and louie in French (Donald nephews for ignorants)
    self.tmpdir = Path(tempfile.gettempdir()) / 'iotest'
    self.tmpdir.mkdir(exist_ok=True)
    self.vfile = self.tmpdir / 'toto.titi.npy'
    self.tfile = self.tmpdir / 'toto.timestamps.npy'
    self.object_files = [
        self.tmpdir / 'neuveu.riri.npy',
        self.tmpdir / 'neuveu.fifi.npy',
        self.tmpdir / 'neuveu.loulou.npy',
        self.tmpdir / 'object.attribute.part1.part2.npy',
        self.tmpdir / 'object.attribute.part1.npy',
        self.tmpdir / 'neuveu.foobar_matlab.npy',
    ]
    for f in self.object_files:
        # the 'matlab' file is a single-column 2D vector; the rest are 1D
        shape = (5, 1) if 'matlab' in str(f) else (5,)
        np.save(file=f, arr=np.random.rand(*shape))
    self.object_files.append(self.tmpdir / 'neuveu.timestamps.npy')
    np.save(file=self.object_files[-1], arr=np.ones((2, 2)))
    # Save an obj.data pqt file
    self.object_files.append(self.tmpdir / 'obj.table.pqt')
    cols = ['foo_0', 'foo_1', 'bar_0', 'bar_1', 'baz']
    pd.DataFrame(np.random.rand(10, 5), columns=cols).to_parquet(self.object_files[-1])
def test_exists(self):
    """Test for one.alf.io.exists."""
    self.assertFalse(alfio.exists(self.tmpdir, 'asodiujfas'))
    self.assertTrue(alfio.exists(self.tmpdir, 'neuveu'))
    # test with attribute string only
    self.assertTrue(alfio.exists(self.tmpdir, 'neuveu', attributes='riri'))
    # test with list of attributes
    self.assertTrue(alfio.exists(self.tmpdir, 'neuveu', attributes=['riri', 'fifi']))
    self.assertFalse(alfio.exists(self.tmpdir, 'neuveu', attributes=['riri', 'fifiasdf']))
    # test with extras
    self.assertTrue(alfio.exists(self.tmpdir, 'object', extra='part2'))
    self.assertTrue(alfio.exists(self.tmpdir, 'object', extra=['part1', 'part2']))
    self.assertTrue(alfio.exists(self.tmpdir, 'neuveu', extra=None))
    # test with wildcards
    self.assertTrue(alfio.exists(self.tmpdir, 'neu*', attributes='riri'))
    # globing with list: an empty part should return true as well
    self.assertTrue(alfio.exists(self.tmpdir, 'object', extra=['']))
def test_metadata_columns(self):
    """Test labelling array columns via a metadata JSON sidecar file."""
    # simple test with meta data to label columns
    file_alf = self.tmpdir / '_ns_object.attribute.npy'
    data = np.random.rand(500, 4)
    cols = ['titi', 'tutu', 'toto', 'tata']
    np.save(file_alf, data)
    np.save(self.tmpdir / '_ns_object.gnagna.npy', data[:, -1])
    path = alfio.save_metadata(file_alf, {'columns': cols})
    self.assertIsInstance(path, ALFPath)
    self.assertEqual('_ns_object.attribute.metadata.json', path.name)
    dread = alfio.load_object(self.tmpdir, 'object', namespace='ns', short_keys=False)
    self.assertTrue(np.all(dread['titi'] == data[:, 0]))
    self.assertTrue(np.all(dread['gnagna'] == data[:, -1]))
    # add another field to the metadata
    alfio.save_metadata(file_alf, {'columns': cols, 'unit': 'potato'})
    dread = alfio.load_object(self.tmpdir, 'object', namespace='ns', short_keys=False)
    self.assertTrue(np.all(dread['titi'] == data[:, 0]))
    self.assertTrue(dread['attributemetadata']['unit'] == 'potato')
    self.assertTrue(np.all(dread['gnagna'] == data[:, -1]))
def test_metadata_columns_UUID(self):
    """Test metadata column labels load for files with a UUID in the name."""
    data = np.random.rand(500, 4)
    # test with UUID extra field
    file_alf = self.tmpdir / '_ns_obj.attr1.2622b17c-9408-4910-99cb-abf16d9225b9.npy'
    np.save(file_alf, data)
    cols = ['titi', 'tutu', 'toto', 'tata']
    file_meta = file_alf.parent / (file_alf.stem + '.metadata.json')
    with open(file_meta, 'w+') as fid:
        fid.write(json.dumps({'columns': cols}, indent=1))
    dread = alfio.load_object(self.tmpdir, 'obj', namespace='ns', short_keys=False)
    self.assertTrue(np.all(dread['titi'] == data[:, 0]))
def test_read_ts(self):
    """Test for one.alf.io.read_ts."""
    # simplest test possible with one column in each file
    t = np.arange(0, 10)
    d = np.random.rand(10)
    np.save(self.vfile, d)
    np.save(self.tfile, t)
    t_, d_ = alfio.read_ts(self.vfile)
    self.assertTrue(np.all(t_ == t))
    self.assertTrue(np.all(d_ == d))
    # Test expands timeseries and deals with single column 2D vectors
    t = np.array([[0, 10], [0.3, 0.4]]).T
    d = np.random.rand(10, 1)
    np.save(self.vfile, d)
    np.save(self.tfile, t)
    t_, d_ = alfio.read_ts(str(self.vfile))
    self.assertEqual(d_.ndim, 1)
    expected = np.around(np.arange(t[0, 1], t[1, 1], .01)[:-1], 2)
    np.testing.assert_array_equal(t_, expected)
    # without the timestamps file the read should fail
    self.tfile.unlink()
    with self.assertRaises(FileNotFoundError):
        alfio.read_ts(self.vfile)
def test_load_object(self):
    """Test for one.alf.io.load_object."""
    # first usage of load object is to provide one of the files belonging to the object
    expected_keys = {'riri', 'fifi', 'loulou', 'foobar_matlab', 'timestamps'}
    obj = alfio.load_object(self.object_files[0])
    self.assertTrue(obj.keys() == expected_keys)
    # Check flattens single column 2D vectors
    self.assertTrue(all([obj[o].shape == (5,) for o in obj]))
    # the second usage is to provide a directory and the object name
    obj = alfio.load_object(self.tmpdir, 'neuveu')
    self.assertTrue(obj.keys() == expected_keys)
    self.assertTrue(all([obj[o].shape == (5,) for o in obj]))
    # providing directory without object will return all ALF files
    with self.assertRaises(ValueError) as context:
        alfio.load_object(self.tmpdir)
    self.assertTrue('object name should be provided too' in str(context.exception))
    # Check key conflicts
    np.save(file=str(self.tmpdir / 'neuveu.loulou.extra.npy'), arr=np.random.rand(5,))
    obj = alfio.load_object(self.tmpdir, 'neuveu', short_keys=False)
    self.assertTrue('loulou.extra' in obj)
    with self.assertRaises(AssertionError):
        alfio.load_object(self.tmpdir, 'neuveu', short_keys=True)
    # the third usage is to provide file list
    obj = alfio.load_object(self.object_files[:3], short_keys=False)
    self.assertEqual(3, len(obj))
    # Check dimension mismatch
    data = np.random.rand(list(obj.values())[0].size + 1)
    np.save(file=str(self.object_files[0]), arr=data)  # Save a new size
    with self.assertLogs(logging.getLogger('one.alf.io'), logging.WARNING) as log:
        alfio.load_object(self.tmpdir, 'neuveu', short_keys=False)
    self.assertIn(str(data.shape), log.output[0])
    # Check loading of 'table' attribute
    obj = alfio.load_object(self.tmpdir, 'obj')
    self.assertIsInstance(obj, alfio.AlfBunch)
    self.assertCountEqual(obj.keys(), ['foo', 'bar', 'baz'])
    self.assertEqual(obj['foo'].shape, (10, 2))
    self.assertEqual(obj['bar'].shape, (10, 2))
    self.assertEqual(obj['baz'].shape, (10,))
    # Check behaviour on conflicting keys
    np.save(self.tmpdir.joinpath('obj.baz.npy'), np.arange(len(obj['foo'])))
    new_obj = alfio.load_object(self.tmpdir, 'obj')
    self.assertNotIn('table', new_obj)
    np.testing.assert_array_equal(
        new_obj['baz'], obj['baz'], 'Table attribute should take precedent')
    # Check behaviour loading table with long keys / extra ALF parts
    table_file = next(self.tmpdir.glob('*table*'))
    new_name = table_file.stem + '_clock.extra' + table_file.suffix
    table_file.rename(table_file.parent.joinpath(new_name))
    new_obj = alfio.load_object(self.tmpdir, 'obj')
    expected = ['baz', 'baz_clock.extra', 'bar_clock.extra', 'foo_clock.extra']
    self.assertCountEqual(expected, new_obj.keys())
def test_ls(self):
    """Test for one.alf.io._ls."""
    # Test listing all ALF files in a directory
    alf_files, _ = alfio._ls(self.tmpdir)
    self.assertIsInstance(alf_files[0], ALFPath)
    self.assertEqual(8, len(alf_files))
    # Test with filepath
    alf_files, parts = alfio._ls(sorted(alf_files)[0])
    self.assertEqual(5, len(alf_files))
    # BUG FIX: the closing parenthesis was misplaced —
    # `assertTrue(all(x[1] == 'neuveu') for x in parts)` passed a generator
    # expression to assertTrue, which is always truthy, so nothing was checked.
    self.assertTrue(all(x[1] == 'neuveu' for x in parts))
    # Test non-existent
    with self.assertRaises(ALFObjectNotFound):
        alfio._ls(self.tmpdir.joinpath('foobar'))
def test_save_npy(self):
    """Test for one.alf.io.save_npy."""
    # test with straight vectors
    a = {'riri': np.random.rand(100), 'fifi': np.random.rand(100)}
    alfio.save_object_npy(self.tmpdir, a, 'neuveux')
    # read after write
    b = alfio.load_object(self.tmpdir, 'neuveux')
    for k in a:
        self.assertTrue(np.all(a[k] == b[k]))
    # test with more exotic shapes, still valid
    a = {'riri': np.random.rand(100),
         'fifi': np.random.rand(100, 2),
         'loulou': np.random.rand(1, 2)}
    alfio.save_object_npy(self.tmpdir, a, 'neuveux')
    # read after write
    b = alfio.load_object(self.tmpdir, 'neuveux')
    for k in a:
        self.assertTrue(np.all(a[k] == b[k]))
    # test with non allowed shape
    a = {'riri': np.random.rand(100),
         'fifi': np.random.rand(100, 2),
         'loulou': np.random.rand(5, 2)}
    with self.assertRaises(Exception) as context:
        alfio.save_object_npy(self.tmpdir, a, 'neuveux')
    self.assertTrue('Dimensions are not consistent' in str(context.exception))
def test_check_dimensions(self):
    """Test for one.alf.io.check_dimensions."""
    # consistent first dimensions -> status 0
    a = {'a': np.ones([10, 10]), 'b': np.ones([10, 2]), 'c': np.ones([10])}
    status = alfio.check_dimensions(a)
    self.assertTrue(status == 0)
    a = {'a': np.ones([10, 10]), 'b': np.ones([10, 1]), 'c': np.ones([10])}
    status = alfio.check_dimensions(a)
    self.assertTrue(status == 0)
    a = {'a': np.ones([10, 15]), 'b': np.ones([1, 15]), 'c': np.ones([10])}
    status = alfio.check_dimensions(a)
    self.assertTrue(status == 0)
    # mismatched first dimension -> status 1
    a = {'a': np.ones([9, 10]), 'b': np.ones([10, 1]), 'c': np.ones([10])}
    status = alfio.check_dimensions(a)
    self.assertTrue(status == 1)
    # test for timestamps which is an exception to the rule
    a = {'a': np.ones([10, 15]), 'b': np.ones([1, 15]), 'c': np.ones([10])}
    a['timestamps'] = np.ones([2, 2])
    a['timestamps_titi'] = np.ones([10, 1])
    status = alfio.check_dimensions(a)
    self.assertTrue(status == 0)
    a['timestamps'] = np.ones([2, 4])
    status = alfio.check_dimensions(a)
    self.assertTrue(status == 1)
def test_ts2vec(self):
    """Test for one.alf.io.ts2vec."""
    n = 10
    # Test interpolate
    ts = np.array([[0, 10], [0, 100]]).T
    ts_ = alfio.ts2vec(ts, n)
    np.testing.assert_array_equal(ts_.astype(int), np.arange(0, 100, 10, dtype=int))
    # Test flatten
    ts = np.ones((n, 1))
    ts_ = alfio.ts2vec(ts, n)
    np.testing.assert_array_equal(ts_, np.ones(n))
    # Test identity
    np.testing.assert_array_equal(ts_, alfio.ts2vec(ts_, n))
    # Test ValueError
    with self.assertRaises(ValueError):
        alfio.ts2vec(np.empty((n, 2, 3)), n)
def test_load_file_content(self):
    """Test for one.alf.io.load_file_content."""
    self.assertIsNone(alfio.load_file_content(self.empty))
    # csv / ssv / tsv files
    self.assertIsInstance(alfio.load_file_content(self.npy), np.ndarray)
    for file in (self.csv, self.ssv, self.tsv):
        with self.subTest('Loading text files', delim=file.suffix):
            loaded = alfio.load_file_content(file)
            self.assertEqual(3, loaded.size)
            self.assertCountEqual(loaded.columns, ['a', 'b', 'c'])
    # a single column file should be squeezed
    loaded = alfio.load_file_content(self.csvuids)
    self.assertEqual(loaded.shape, (6,))
    loaded = alfio.load_file_content(self.json1)
    self.assertCountEqual(loaded.keys(), ['a', 'b'])
    self.assertIsNone(alfio.load_file_content(self.json2))
    loaded = alfio.load_file_content(self.json3)
    self.assertCountEqual(loaded, ['a', 'b'])
    # Load a parquet file
    pqt = next(Path(__file__).parents[1].joinpath('fixtures').glob('*.pqt'))
    loaded = alfio.load_file_content(pqt)
    self.assertIsInstance(loaded, pd.DataFrame)
    # Unknown file should return ALFPath
    file = alfio.load_file_content(str(self.xyz))
    self.assertEqual(file, self.xyz)
    self.assertIsInstance(file, ALFPath)
    self.assertIsNone(alfio.load_file_content(None))
    # Load YAML file
    loaded = alfio.load_file_content(str(self.yaml))
    self.assertCountEqual(loaded.keys(), ['a', 'b'])
    # Load npz file
    loaded = alfio.load_file_content(str(self.npz1))
    self.assertIsInstance(loaded, np.ndarray, 'failed to unpack')
    self.assertEqual(loaded.shape, (5,))
    loaded = alfio.load_file_content(str(self.npz2))
    self.assertIsInstance(loaded, np.lib.npyio.NpzFile, 'failed to return npz array')
    self.assertEqual(loaded['arr_0'].shape, (5,))
@unittest.skipIf(SKIP_SPARSE, 'pydata sparse package not installed')
class TestsLoadFileNonStandard(unittest.TestCase):
    """Tests for one.alf.io.load_file_content function with non-standard libraries."""
def test_remove_empty_folders(self):
    """Test for one.alf.io.remove_empty_folders."""
    root = Path(self.tempdir.name) / 'glob_dir'
    root.mkdir()
    root.joinpath('empty0').mkdir(exist_ok=True)
    root.joinpath('full0').mkdir(exist_ok=True)
    root.joinpath('full0', 'file.txt').touch()
    self.assertTrue(len(list(root.glob('*'))) == 2)
    # only the empty folder should be removed
    alfio.remove_empty_folders(root)
    self.assertTrue(len(list(root.glob('*'))) == 1)
def test_iter_sessions(self):
    """Test for one.alf.io.iter_sessions."""
    # Create invalid session folder
    self.session_path.parent.parent.joinpath('bad_session').mkdir()
    valid_sessions = alfio.iter_sessions(self.tempdir.name)
    self.assertEqual(next(valid_sessions), self.session_path)
    self.assertFalse(next(valid_sessions, False))
    # makes sure that the session path returns itself on the iterator
    path = next(alfio.iter_sessions(self.session_path))
    self.assertEqual(self.session_path, path)
    self.assertIsInstance(path, ALFPath)
    # test pattern arg
    valid_sessions = list(alfio.iter_sessions(
        self.tempdir.name, pattern='*/Subjects/*/????-??-??/*'))
    self.assertEqual([ALFPath(self.session_path)], valid_sessions)
    subjects_path = Path(self.tempdir.name, 'fakelab', 'Subjects')
    valid_sessions = alfio.iter_sessions(subjects_path, pattern='*/????-??-??/*')
    self.assertEqual(self.session_path, next(valid_sessions))
    valid_sessions = alfio.iter_sessions(subjects_path, pattern='*/Subjects/*/????-??-??/*')
    self.assertFalse(next(valid_sessions, False))
def test_iter_datasets(self):
    """Test for one.alf.io.iter_datasets."""
    # Create valid dataset
    dset = self.session_path.joinpath('collection', 'object.attribute.ext')
    dset.parent.mkdir()
    dset.touch()
    # Create invalid dataset
    self.session_path.joinpath('somefile.txt').touch()
    ses_files = list(alfio.iter_datasets(self.session_path))
    # only the ALF dataset is yielded, relative to the session path
    self.assertEqual([Path(*dset.parts[-2:])], ses_files)
    self.assertIsInstance(ses_files[0], ALFPath)
def test_unique(self):
    """Test for one.alf.io.find_variants function."""
    dupes = alfio.find_variants(self.dsets)
    self.assertCountEqual(self.dsets, dupes.keys(), 'expected keys to match input files')
    self.assertFalse(any(map(any, dupes.values())), 'expected no duplicates')
    paths = filter(None, (*dupes.keys(), *dupes.values()))
    self.assertTrue(all(isinstance(x, ALFPath) for x in paths))
    # With extra=False should treat files with extra parts as a variant
    dupes = alfio.find_variants(self.dsets, extra=False)
    # 'bar.baz.abc.npy' is a variant of '#revision#/bar.baz.def.npy' and vice versa
    self.assertEqual(dupes[self.dsets[2]], [self.dsets[4]])
    self.assertEqual(dupes[self.dsets[4]], [self.dsets[2]])
    # Expect all other datasets to be considered unique
    others = [v for k, v in dupes.items() if k not in (self.dsets[2], self.dsets[4])]
    self.assertFalse(any(map(any, others)))
    # Treat other file parts as variants
    files = [self.dsets[0], self.dsets[2], self.dsets[-1]]
    dupes = alfio.find_variants(files, namespace=False, timescale=False, extra=False)
    expected_files = (self.dsets[1:2], self.dsets[3:5], [])  # expected variants for each file
    for key, expected in zip(files, expected_files):
        with self.subTest(key=key):
            self.assertCountEqual(dupes[self.session_path.joinpath(key)], expected)