[importers] Add localizer importer
authorYannick Schwartz
Tue, 11 Dec 2012 18:49:06 +0100
changeset 12 f692e9e76067
parent 11 8e7fc06df352
child 13 e5efe62c12c4
[importers] Add localizer importer
importers/localizer.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/importers/localizer.py	Tue Dec 11 18:49:06 2012 +0100
@@ -0,0 +1,714 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+import glob
+import csv
+import json
+import pickle
+from datetime import datetime
+
+import nibabel as nb
+
# Map of the numeric gender codes found in subject.json to the
# vocabulary stored on the Subject entity ('3' doubles as the
# fallback for unknown codes, see import_subject).
gender_map = {
    '1': u'male',
    '2': u'female',
    '3': u'unknown'
}

# Map of the free-text laterality labels found in subject.json to the
# vocabulary stored on the Subject entity ('Unknown' doubles as the
# fallback for unrecognized labels, see import_subject).
handedness_map = {
    'Right handed': u'right',
    'Left handed': u'left',
    'Ambidextrous': u'both',
    'Unknown': u'unknown',
}
+
def rel_path(path):
    """Rebase *path* under the virtual '/localizer' root.

    Everything up to (and including) the last occurrence of the
    substring 'localizer' is dropped; the remainder, stripped of
    surrounding slashes, is re-anchored at '/localizer'.
    """
    tail = path.split('localizer')[-1]
    return os.path.join('/localizer', tail.strip('/'))
+
+
def get_image_info(image_path, get_tr=True):
    """Extract resolution/shape/affine metadata from a NIfTI image.

    Parameters
    ----------
    image_path: path of an image loadable by nibabel.
    get_tr: when True, also parse TR/TE (milliseconds) out of the
        header 'descrip' field; both are set to None when the
        description does not carry a parsable 'TR=..ms TE=..ms' string.

    Returns a dict of values suitable for creating an MRIData entity.
    """
    img = nb.load(image_path)
    header = img.get_header()
    shape = img.get_shape()
    data = {}
    # NOTE(review): int() truncates sub-millimetric resolutions
    # (e.g. 1.5mm -> 1); kept as-is since the schema stores ints.
    data['voxel_res_x'] = int(header['pixdim'][1])
    data['voxel_res_y'] = int(header['pixdim'][2])
    data['voxel_res_z'] = int(header['pixdim'][3])
    data['shape_x'] = int(shape[0])
    data['shape_y'] = int(shape[1])
    data['shape_z'] = int(shape[2])
    # Fourth (time) dimension only exists for 4D (functional) images.
    data['shape_t'] = int(shape[3]) if len(shape) == 4 else None
    data['affine'] = pickle.dumps(img.get_affine().tolist())
    desc = str(header['descrip'])

    # Was 'except Exception, e' (Python 2-only syntax, unused binding);
    # narrowed to the two errors the parse can actually raise:
    # IndexError when the pattern is absent, ValueError from float().
    try:
        if get_tr:
            tr, te = re.findall(
                'TR=(.*)ms.*TE=(.*)ms', desc)[0]
            data['tr'] = float(tr)
            data['te'] = float(te)
    except (IndexError, ValueError):
        data['tr'] = None
        data['te'] = None

    return data
+
+
def import_subject(data_dir):
    """Build Subject attributes and demographic score values.

    Reads <data_dir>/subject.json and returns a (subject_data,
    score_values) tuple; score_values is a list of {'name', 'value'}
    dicts, one per demographic score type (value None when the key is
    absent from the JSON file -- the caller skips falsy values).
    """
    info = json.load(
        open('%s/subject.json' % data_dir))

    data = {}
    data['identifier'] = info['nip']
    # Fixed: 'age' was assigned twice in the original (duplicate line).
    data['age'] = info['age']
    data['gender'] = gender_map.get(
        info['sex'], gender_map['3'])
    data['handedness'] = handedness_map.get(
        info['laterality'],
        handedness_map['Unknown'])

    score_types = [u'language', u'family',
                   u'schizophrenic', u'dyslexic',
                   u'dyscalculic', u'synaesthete']

    score_values = [{'name': score, 'value': info.get(score)}
                    for score in score_types]

    return data, score_values
+
+
def import_study(data_dir=None):
    """Return the attributes of the single localizer Study entity.

    *data_dir* is accepted only for signature consistency with the
    other import_* helpers and is ignored.
    """
    return {'name': u'localizer',
            'description': u'localizer db'}
+
+
def import_anat(data_dir, normalized=False):
    """Build (Scan, MRIData) attribute dicts for a subject's anatomy.

    Parameters
    ----------
    data_dir: subject directory holding subject.json and anat/.
    normalized: True for the normalized T1 (anat.nii.gz), False for
        the raw one (raw_anat.nii.gz).
    """
    info = json.load(
        open('%s/subject.json' % data_dir))

    scan_data = {}
    scan_data['identifier'] = u'%s_anat' % info['exam']
    scan_data['format'] = u'nii.gz'
    # The original tested 'normalized' three separate times (a ternary
    # plus two if/else blocks); a single branch sets the three
    # attributes that depend on it.
    if normalized:
        scan_data['label'] = u'anatomy'
        scan_data['type'] = u'normalized T1'
        scan_data['filepath'] = os.path.join(
            data_dir, 'anat', 'anat.nii.gz')
    else:
        scan_data['label'] = u'raw anatomy'
        scan_data['type'] = u'raw T1'
        scan_data['filepath'] = os.path.join(
            data_dir, 'anat', 'raw_anat.nii.gz')
    scan_data['timepoint'] = info['date']
    scan_data['completed'] = True
    scan_data['valid'] = True
    scan_data['description'] = info['anatomy']

    mri_data = {'sequence': u'T1'}
    # Read image metadata from the absolute path *before* the filepath
    # is rewritten relative to the /localizer root.
    mri_data.update(
        get_image_info(scan_data['filepath']))
    scan_data['filepath'] = rel_path(
        scan_data['filepath'])

    return scan_data, mri_data
+
+
def import_fmri(data_dir, preprocessed=False):
    """Build (Scan, MRIData) attribute dicts for a subject's BOLD run.

    Parameters
    ----------
    data_dir: subject directory holding subject.json and fmri/.
    preprocessed: True for the preprocessed EPI (bold.nii.gz), False
        for the raw one (raw_bold.nii.gz).
    """
    info = json.load(
        open('%s/subject.json' % data_dir))

    scan_data = {}
    scan_data['format'] = u'nii.gz'
    # The original tested 'preprocessed' four separate times; a single
    # branch sets every attribute that depends on it.
    if preprocessed:
        scan_data['identifier'] = u'%s_fmri' % info['exam']
        scan_data['label'] = u'bold'
        scan_data['type'] = u'preprocessed fMRI'
        scan_data['filepath'] = os.path.join(
            data_dir, 'fmri', 'bold.nii.gz')
    else:
        scan_data['identifier'] = u'%s_raw_fmri' % info['exam']
        scan_data['label'] = u'raw bold'
        scan_data['type'] = u'raw fMRI'
        scan_data['filepath'] = os.path.join(
            data_dir, 'fmri', 'raw_bold.nii.gz')
    scan_data['timepoint'] = info['date']
    scan_data['completed'] = True
    scan_data['valid'] = True
    # Acquisition issues are recorded in the free-text description...
    scan_data['description'] = (
        u'epi_problem=%(epi_problem)s '
        'sound_problem=%(sound_problem)s '
        'video_problem=%(video_problem)s '
        'motor_error=%(motor_error)s' % info)

    # ...together with the localizer sequence variants that were run.
    seq_types = [u'localizer_long_complex',
                 u'localizer_long_easy',
                 u'localizer_short_complex',
                 u'localizer_short_easy']
    for seq_type in seq_types:
        if info[seq_type]:
            scan_data['description'] += u' %s' % seq_type

    mri_data = {'sequence': u'EPI'}
    # Read image metadata from the absolute path *before* the filepath
    # is rewritten relative to the /localizer root.
    mri_data.update(
        get_image_info(scan_data['filepath']))
    scan_data['filepath'] = rel_path(
        scan_data['filepath'])

    return scan_data, mri_data
+
+
def import_assessment(data_dir, label):
    """Build the attributes of the Assessment named *label* for a subject.

    The identifier is '<nip>_<label>'; the acquisition date string is
    additionally parsed into a datetime when present, else None.
    """
    info = json.load(
        open('%s/subject.json' % data_dir))

    acq_date = info.get('date')
    return {
        'identifier': u'%s_%s' % (info['nip'], label),
        'protocol': info['protocol'],
        'timepoint': info['date'],
        'datetime': (datetime.strptime(acq_date, '%Y-%m-%d %H:%M:%S')
                     if acq_date else None),
    }
+
+
def import_maps(data_dir, dtype='c'):
    """Yield (scan, mri, external_resource) dicts for each c/t map.

    Iterates over <data_dir>/<dtype>_maps/*.nii.gz; each map is paired
    with the JSON contrast definition of the same basename under
    <data_dir>/contrasts/.
    """
    info = json.load(
        open('%s/subject.json' % data_dir))

    base_path = os.path.join(data_dir, '%s_maps' % dtype)

    for img_path in glob.iglob(os.path.join(base_path, '*.nii.gz')):
        # Contrast name, e.g. 'left_right' for 'left_right.nii.gz';
        # computed once and reused for both label and resource path.
        name = os.path.split(img_path)[1].split('.nii.gz')[0]

        scan_data = {
            'identifier': u'%s_%s_map' % (info['exam'], dtype),
            'label': unicode(name.replace('_', ' ')),
            'format': u'nii.gz',
            'type': u'%s map' % dtype,
            'filepath': img_path,
            'timepoint': info['date'],
            'completed': True,
            'valid': True,
        }

        # Statistical maps carry no TR/TE: skip the header parsing.
        mri_data = {'sequence': None}
        mri_data.update(
            get_image_info(scan_data['filepath'], get_tr=False))

        scan_data['filepath'] = rel_path(
            scan_data['filepath'])

        ext_resource = {
            'name': u'contrast definition',
            'filepath': unicode(rel_path(os.path.join(
                data_dir, 'contrasts', '%s.json' % name))),
        }

        yield scan_data, mri_data, ext_resource
+
+
def import_mask(data_dir):
    """Build (Scan, MRIData) attribute dicts for the subject brain mask."""
    info = json.load(
        open('%s/subject.json' % data_dir))

    scan_data = {
        'identifier': u'%s_mask' % info['exam'],
        'label': u'mask',
        'format': u'nii.gz',
        'type': u'boolean mask',
        'filepath': unicode(os.path.join(data_dir, 'mask.nii.gz')),
        'timepoint': info['date'],
        'completed': True,
        'valid': True,
    }

    mri_data = {'sequence': None}
    # Read image metadata from the absolute path, then rebase the
    # filepath under the /localizer root.
    mri_data.update(
        get_image_info(scan_data['filepath']))
    scan_data['filepath'] = rel_path(
        scan_data['filepath'])

    return scan_data, mri_data
+
+
def import_questionnaire(data_dir):
    """Build the Questionnaire attributes and its list of Questions.

    Questions are derived from the keys of behavioural.json (minus the
    'date' entry), ordered alphabetically; each question's position
    and identifier follow that ordering, and its type is boolean or
    float depending on the sample value found in the file.
    """
    questionnaire = {
        'name': u'localizer questionnaire',
        'identifier': u'localizer_questionnaire',
        'type': u'behavioural',
        'version': u'1.0',
        'language': u'French',
    }

    behave = json.load(
        open('%s/behavioural.json' % data_dir))
    # 'date' is run metadata, not a question.
    del behave['date']

    questions = []
    for position, item in enumerate(sorted(behave.keys())):
        value = behave[item]
        questions.append({
            'identifier': u'localizer_%s' % position,
            'position': position,
            'text': unicode(item),
            'type': u'boolean' if isinstance(value, bool) else u'float',
            'possible_answers': None,
        })

    return questionnaire, questions
+
+
def import_questionnaire_run(data_dir, questionnaire_id, questions_id):
    """Build a QuestionnaireRun and its Answer list for one subject.

    Parameters
    ----------
    data_dir: subject directory holding behavioural.json; its basename
        is used as the subject identifier in the run identifier.
    questionnaire_id: eid of the Questionnaire the run instantiates.
    questions_id: mapping of question text -> Question eid, as built
        alongside the questionnaire.

    Returns a (run, answers) tuple of attribute dicts.
    """
    run = {}

    behave = json.load(
        open('%s/behavioural.json' % data_dir))
    sid = os.path.split(data_dir)[1]
    run['identifier'] = u'localizer_questionnaire_%s' % (sid)
    run['user_ident'] = u'subject'
    if behave.get('date'):
        run['datetime'] = datetime.strptime(
            behave['date'],
            '%Y-%m-%d %H:%M:%S')
    else:
        run['datetime'] = None
    # 'date' and 'nip' are administrative entries, not answers: drop
    # them before pairing the remaining keys with questions.
    del behave['date']
    del behave['nip']
    run['iteration'] = 1
    run['completed'] = True
    run['valid'] = True
    run['instance_of'] = questionnaire_id
    answers = []

    # Keys are sorted so answers line up with the question ordering
    # produced by import_questionnaire (which sorts the same keys).
    values = [behave[k] for k in sorted(behave.keys())]

    for i, (val, item) in enumerate(zip(values, sorted(behave.keys()))):
        answer = {}
        # XXX: handle str answers
        if not isinstance(val, (str, unicode)):
            # Falsy values (0, False, None) are stored as None.
            answer['value'] = float(val) if val else None
            answer['datetime'] = run['datetime']
            answer['question'] = questions_id[item]
            answers.append(answer)

    return run, answers
+
+
def import_center(data_dir):
    """Build Center attributes for the site recorded in subject.json.

    Only the 'SHFJ' and 'Neurospin' sites are described; any other
    site yields a dict holding just the identifier.
    """
    info = json.load(open('%s/subject.json' % data_dir))
    site = info['site']

    known_centers = {
        u'SHFJ': {'name': u'SHFJ',
                  'department': u'Essonne',
                  'city': u'Orsay',
                  'postal_code': 91401,
                  'country': u'France'},
        u'Neurospin': {'name': u'Neurospin',
                       'department': u'Essonne',
                       'city': u'Gif-sur-Yvette',
                       'postal_code': 91191,
                       'country': u'France'},
    }

    data = {'identifier': site}
    data.update(known_centers.get(site, {}))
    return data
+
+
def import_device(data_dir):
    """Build Device attributes for the scanner at the subject's site.

    Neurospin hosts a 3T SIEMENS Trio, the SHFJ a 3T Brucker; any
    other site yields an empty dict.
    """
    info = json.load(open('%s/subject.json' % data_dir))
    site = info['site']

    data = {}
    if site == u'Neurospin':
        data = {'name': '3T SIEMENS Trio',
                'manufacturer': 'SIEMENS',
                'model': 'Trio',
                'hosted_by': 'Neurospin'}
    elif site == u'SHFJ':
        data = {'name': '3T Brucker',
                'manufacturer': 'Brucker',
                'model': '3T Brucker',
                'hosted_by': 'SHFJ'}
    return data
+
+
def import_genes(genetics_dir):
    """Read the hg18 refGene meta file into a list of gene dicts.

    Each row of the tab-separated file yields name/gene_id (column 0),
    start/stop positions (columns 1-2) and the chromosome name
    (column 3, stripped of any '_'-suffixed variant such as
    'chrN_random').
    """
    ref_gene_path = os.path.join(
        genetics_dir, 'sprintBack', 'hg18.refGene.meta')
    # NOTE(review): the original also read chromosomes.json into a
    # variable that was never used; that dead file read was dropped.
    ref_gene = []
    with open(ref_gene_path) as meta_file:
        for row in csv.reader(meta_file, delimiter='\t'):
            ref_gene.append({
                'name': unicode(row[0]),
                'gene_id': unicode(row[0]),
                'uri': None,
                'start_position': int(row[1]),
                'stop_position': int(row[2]),
                'chromosome': row[3].split('_')[0],
            })
    return ref_gene
+
+
def import_chromosomes(genetics_dir):
    """Read chromosomes.json into a list of Chromosome attribute dicts.

    The JSON file maps internal ids to chromosome labels; each entity
    gets a 'chrN'-style upper-cased name, also used as identifier.
    """
    ref_chr_path = os.path.join(genetics_dir, 'chromosomes.json')
    with open(ref_chr_path) as chr_file:
        chromosomes = json.load(chr_file)
    chrs = []
    # 'chrom' avoids shadowing the chr() builtin (original used 'chr').
    for chr_id in chromosomes:
        chrom = {}
        chrom['name'] = u'chr%s' % chromosomes[chr_id].upper()
        chrom['identifier'] = unicode(chrom['name'])
        chrs.append(chrom)
    return chrs
+
+
def import_snps(genetics_dir):
    """Read the reference .bim file into a list of SNP dicts.

    Rows whose chromosome column is '0' (markers without a chromosome
    assignment) are skipped; remaining chromosome ids are translated
    through chromosomes.json into 'chrN'-style names.

    NOTE(review): the reference SNP list is taken from one hard-coded
    subject file (bru3506.bim) -- presumably all subjects share the
    same marker set; confirm against the genotyping pipeline.
    """
    ref_chr_path = os.path.join(genetics_dir, 'chromosomes.json')
    ref_snp_path = os.path.join(
        genetics_dir, 'sprintBack', 'bysubj', 'bru3506.bim')
    with open(ref_chr_path) as chr_file:
        chromosomes = json.load(chr_file)
    snps = []
    with open(ref_snp_path) as snp_file:
        for row in csv.reader(snp_file, delimiter='\t'):
            # Skip unplaced markers before building the dict (the
            # original allocated the dict first, then skipped).
            if row[0] == '0':
                continue
            chr_id = chromosomes[row[0]]
            snps.append({
                'rs_id': unicode(row[1]),
                'position': int(row[3]),
                'chromosome': u'chr%s' % chr_id.upper(),
            })
    return snps
+
+
def import_genomic_measures(genetics_dir):
    """Build one GenomicMeasure attribute dict per subject .bim file.

    Returns a mapping of subject id (the .bim basename) to measure
    attributes; the filepath is the plink prefix (full path minus the
    '.bim' extension) rebased under /localizer.
    """
    measure_path = os.path.join(genetics_dir, 'sprintBack', 'bysubj')
    g_measures = {}

    for path in glob.glob(os.path.join(measure_path, '*.bim')):
        subject_id = os.path.split(path)[1].split('.bim')[0]
        g_measures[subject_id] = {
            'identifier': u'genomic_measure_%s' % subject_id,
            'type': u'SNP',
            'format': u'plink',
            # plink prefix: the measure path without the .bim extension
            'filepath': unicode(rel_path(path.split('.bim')[0])),
            'chip_serialnum': None,
            'completed': True,
            'valid': True,
            'platform': None,
        }
    return g_measures
+
+
if __name__ == '__main__':
    # One-shot import of the whole localizer dataset into a cubicweb
    # instance: questionnaire, study, genetics reference data, then a
    # per-subject loop (center/device, subject, genetics, anat, fMRI,
    # maps, mask, questionnaire run).
    # NOTE(review): the second import immediately rebinds Store, so the
    # NoHookRQLObjectStore import is dead code and SQLGenObjectStore is
    # the store actually used.
    from cubicweb.dataimport import NoHookRQLObjectStore as Store
    from cubicweb.dataimport import SQLGenObjectStore as Store

    root_dir = '/opt/localizer/localizer/subjects'
    genetics_dir = '/opt/localizer/localizer/genetics'

    # for sid in glob.glob('%s/*' % root_dir):
    #     print os.path.split(sid)[1]
    #     print import_devices(sid)
    #     print import_center(sid)

    # NOTE(review): 'session' is not defined in this module --
    # presumably injected by the cubicweb shell this script is run in;
    # confirm before running standalone.
    store = Store(session)

    # ---------------------------------------------------------------------
    # Questionnaire
    # ---------------------------------------------------------------------

    # The questionnaire definition is derived from the behavioural.json
    # of an arbitrary subject (the first one globbed).
    one_subject = glob.glob('%s/*' % root_dir)[0]
    questionnaire, questions = import_questionnaire(one_subject)
    questionnaire = store.create_entity('Questionnaire', **questionnaire)
    # Map question text -> eid, used later to attach answers to runs.
    questions_id = {}
    for question in questions:
        question['questionnaire'] = questionnaire.eid
        question = store.create_entity('Question', **question)
        questions_id[question.text] = question.eid

    # Caches shared across subjects: entities are created once, then
    # looked up by name on later iterations.
    score_defs = {}
    centers = {}
    devices = {}

    # ---------------------------------------------------------------------
    # Study
    # ---------------------------------------------------------------------

    study = import_study()
    study = store.create_entity('Study', **study)

    # ---------------------------------------------------------------------
    # Genetics
    # ---------------------------------------------------------------------

    print
    print 'Import genetic data...'
    print '-' * 80

    # Chromosomes first: genes and SNPs reference them by name below.
    chrs = import_chromosomes(genetics_dir)
    chr_map = {}
    for chr in chrs:
        print 'chr', chr['name']
        # 'chr' is rebound from the attribute dict to the created
        # entity; its name indexes the eid for later lookups.
        chr = store.create_entity('Chromosome', **chr)
        chr_map.setdefault(chr['name'], chr.eid)

    print chr_map

    genes = import_genes(genetics_dir)
    for gene in genes:
        print 'gene', gene['name'], gene['chromosome']
        # Replace the chromosome name by the eid of its entity.
        gene['chromosome'] = chr_map[gene['chromosome']]
        gene = store.create_entity('Gene', **gene)

    snps = import_snps(genetics_dir)
    snp_eids = []
    for snp in snps:
        print 'snp', snp['rs_id']
        snp['chromosome'] = chr_map[snp['chromosome']]
        snp = store.create_entity('Snp', **snp)
        snp_eids.append(snp.eid)

    # Every SNP is attached to the single genotyping platform.
    platform = {
        'identifier': 'Affymetrix_6.0',
        }

    platform = store.create_entity('GenomicPlatform', **platform)
    for snp_eid in snp_eids:
        store.relate(platform.eid, 'related_snps', snp_eid)


    gen_measures = import_genomic_measures(genetics_dir)

    print
    print 'Import subject data...'
    print '-' * 80

    store.flush()

    for sid in glob.glob('%s/*' % root_dir):
        print os.path.split(sid)[1]

        # ---------------------------------------------------------------------
        # Center & devices
        # ---------------------------------------------------------------------

        # Centers and devices are deduplicated across subjects through
        # the 'centers' / 'devices' name -> eid caches.
        center = import_center(sid)
        if center['name'] not in centers:
            center = store.create_entity('Center', **center)
            centers.setdefault(center.name, center.eid)
            center_eid = center.eid
        else:
            center_eid = centers[center['name']]
        device = import_device(sid)
        # NOTE(review): assumes the hosting center is already in the
        # cache, i.e. 'hosted_by' always names the subject's own
        # center -- confirm for the full dataset.
        device['hosted_by'] = centers[device['hosted_by']]

        if device['name'] not in devices:
            device = store.create_entity('Device', **device)
            devices.setdefault(device.name, device.eid)
            device_id = device.eid
        else:
            device_id = devices[device['name']]

        # ---------------------------------------------------------------------
        # Subject
        # ---------------------------------------------------------------------

        subject, score_values = import_subject(sid)
        subject = store.create_entity('Subject', **subject)
        store.relate(subject.eid, 'related_studies', study.eid)

        # Shared design-matrix resource, attached below to every c/t
        # map scan of this subject.
        dm_res = store.create_entity(
            'ExternalResource',
            name=u'design_matrix',
            filepath=unicode(rel_path(os.path.join(sid, 'design_matrix.json'))))

        for score_val in score_values:
            value = score_val['value']
            # Falsy scores (None, empty) are simply not recorded.
            if not value:
                continue

            # ScoreDef entities are created lazily, once per score name.
            if score_val['name'] in score_defs:
                def_eid = score_defs.get(score_val['name'])
            else:
                score_def = {}
                score_def['name'] = score_val['name']
                score_def['category'] = u'demographics'
                score_def['type'] = u'string'
                score_def = store.create_entity('ScoreDef', **score_def)
                score_defs[score_val['name']] = score_def.eid
                def_eid = score_def.eid

            score_val = store.create_entity('ScoreValue', definition=def_eid, value=value)
            store.relate(subject.eid, 'related_infos', score_val.eid)

        # ---------------------------------------------------------------------
        # Genetics
        # ---------------------------------------------------------------------
        gen_assessment = import_assessment(sid, 'genetics')
        gen_assessment = store.create_entity('Assessment', **gen_assessment)
        store.relate(subject.eid, 'concerned_by', gen_assessment.eid)
        # NOTE(review): raises KeyError when a subject has no .bim file
        # -- confirm every subject has a genomic measure.
        measure = gen_measures[subject.identifier]
        measure['platform'] = platform.eid
        measure = store.create_entity('GenomicMeasure', **measure)
        store.relate(measure.eid, 'concerns', subject.eid, 'GenomicMeasure')
        store.relate(gen_assessment.eid, 'generates', measure.eid)


        # ---------------------------------------------------------------------
        # Anat & fMRI
        # ---------------------------------------------------------------------

        # anat assessment
        anat_assessment = import_assessment(sid, 'anat')
        anat_assessment = store.create_entity('Assessment', **anat_assessment)
        store.relate(center_eid, 'holds', anat_assessment.eid)
        store.relate(subject.eid, 'concerned_by', anat_assessment.eid)


        # raw anat
        scan_anat, mri_anat = import_anat(sid)
        mri_anat = store.create_entity('MRIData', **mri_anat)
        scan_anat['has_data'] = mri_anat.eid
        scan_anat = store.create_entity('Scan', **scan_anat)
        store.relate(scan_anat.eid, 'concerns', subject.eid, 'Scan')
        store.relate(scan_anat.eid, 'uses_device', device_id)
        store.relate(anat_assessment.eid, 'generates', scan_anat.eid)

        # normalized anat
        scan_anat, mri_anat = import_anat(sid, True)
        mri_anat = store.create_entity('MRIData', **mri_anat)
        scan_anat['has_data'] = mri_anat.eid
        scan_anat = store.create_entity('Scan', **scan_anat)
        store.relate(scan_anat.eid, 'concerns', subject.eid, 'Scan')
        store.relate(scan_anat.eid, 'uses_device', device_id)
        store.relate(anat_assessment.eid, 'generates', scan_anat.eid)

        # fmri assessment
        fmri_assessment = import_assessment(sid, 'fmri')
        fmri_assessment = store.create_entity('Assessment', **fmri_assessment)
        store.relate(center_eid, 'holds', fmri_assessment.eid)
        store.relate(subject.eid, 'concerned_by', fmri_assessment.eid)

        # raw bold
        scan_fmri, mri_fmri = import_fmri(sid)
        mri_fmri = store.create_entity('MRIData', **mri_fmri)
        scan_fmri['has_data'] = mri_fmri.eid
        scan_fmri = store.create_entity('Scan', **scan_fmri)
        store.relate(scan_fmri.eid, 'concerns', subject.eid, 'Scan')
        store.relate(scan_fmri.eid, 'uses_device', device_id)
        store.relate(fmri_assessment.eid, 'generates', scan_fmri.eid)

        # preproc bold
        scan_fmri, mri_fmri = import_fmri(sid, True)
        mri_fmri = store.create_entity('MRIData', **mri_fmri)
        scan_fmri['has_data'] = mri_fmri.eid
        scan_fmri = store.create_entity('Scan', **scan_fmri)
        store.relate(scan_fmri.eid, 'concerns', subject.eid, 'Scan')
        store.relate(scan_fmri.eid, 'uses_device', device_id)
        store.relate(fmri_assessment.eid, 'generates', scan_fmri.eid)

        # ---------------------------------------------------------------------
        # Maps
        # ---------------------------------------------------------------------

        # c_maps assessment
        assessment = import_assessment(sid, 'c_maps')
        assessment = store.create_entity('Assessment', **assessment)
        store.relate(center_eid, 'holds', assessment.eid)
        store.relate(subject.eid, 'concerned_by', assessment.eid)

        for scan, mri, con_res in import_maps(sid, 'c'):
            mri = store.create_entity('MRIData', **mri)
            scan['has_data'] = mri.eid
            scan = store.create_entity('Scan', **scan)
            con_res = store.create_entity('ExternalResource', **con_res)
            store.relate(scan.eid, 'concerns', subject.eid, 'Scan')
            store.relate(scan.eid, 'uses_device', device_id)
            store.relate(assessment.eid, 'generates', scan.eid)
            store.relate(scan.eid, 'external_resources', con_res.eid)
            store.relate(scan.eid, 'external_resources', dm_res.eid)

        # t_maps assessment
        assessment = import_assessment(sid, 't_maps')
        assessment = store.create_entity('Assessment', **assessment)
        store.relate(center_eid, 'holds', assessment.eid)
        store.relate(subject.eid, 'concerned_by', assessment.eid)

        for scan, mri, con_res in import_maps(sid, 't'):
            mri = store.create_entity('MRIData', **mri)
            con_res = store.create_entity('ExternalResource', **con_res)
            scan['has_data'] = mri.eid
            scan = store.create_entity('Scan', **scan)
            store.relate(scan.eid, 'concerns', subject.eid, 'Scan')
            store.relate(scan.eid, 'uses_device', device_id)
            store.relate(assessment.eid, 'generates', scan.eid)
            store.relate(scan.eid, 'external_resources', con_res.eid)
            store.relate(scan.eid, 'external_resources', dm_res.eid)

        # ---------------------------------------------------------------------
        # Mask
        # ---------------------------------------------------------------------

        assessment = import_assessment(sid, 'mask')
        assessment = store.create_entity('Assessment', **assessment)
        store.relate(center_eid, 'holds', assessment.eid)
        store.relate(subject.eid, 'concerned_by', assessment.eid)
        scan, mri = import_mask(sid)
        mri = store.create_entity('MRIData', **mri)
        scan['has_data'] = mri.eid
        scan = store.create_entity('Scan', **scan)
        store.relate(scan.eid, 'concerns', subject.eid, 'Scan')
        store.relate(scan.eid, 'uses_device', device_id)
        store.relate(assessment.eid, 'generates', scan.eid)

        # ---------------------------------------------------------------------
        # Questionnaire run
        # ---------------------------------------------------------------------

        assessment = import_assessment(sid, 'questionnaire')
        assessment = store.create_entity('Assessment', **assessment)
        store.relate(center_eid, 'holds', assessment.eid)
        store.relate(subject.eid, 'concerned_by', assessment.eid)

        run, answers = import_questionnaire_run(sid, questionnaire.eid, questions_id)
        run = store.create_entity('QuestionnaireRun', **run)

        for answer in answers:
            answer['questionnaire_run'] = run.eid
            answer = store.create_entity('Answer', **answer)

        store.relate(run.eid, 'concerns', subject.eid, 'QuestionnaireRun')
        store.relate(assessment.eid, 'generates', run.eid)

    # Push everything to the database in one final batch.
    store.flush()
    store.commit()