Skip to content

assembly

ensembl.io.genomio.assembly

Assembly preparation module.

FTPConnectionError

Bases: Exception

Error while initialising an FTP connection.

Source code in src/python/ensembl/io/genomio/assembly/download.py
56
57
class FTPConnectionError(Exception):
    """Raised when an FTP connection cannot be initialised."""

FileDownloadError

Bases: Exception

When a file download fails or there is a problem with that file.

Source code in src/python/ensembl/io/genomio/assembly/download.py
52
53
class FileDownloadError(Exception):
    """Raised when a file download fails or there is a problem with the downloaded file."""

UnsupportedFormatError

Bases: Exception

When a string does not have the expected format.

Source code in src/python/ensembl/io/genomio/assembly/download.py
60
61
class UnsupportedFormatError(Exception):
    """Raised when a string does not have the expected format."""

download_files(ftp_connection, accession, dl_dir, max_redo)

Given an INSDC accession, download all available files from the FTP server to the download directory.

Parameters:

Name Type Description Default
ftp_connection FTP

An open FTP connection object

required
accession str

Genome assembly accession.

required
dl_dir Path

Path to downloaded FTP files.

required
max_redo int

Maximum FTP connection retry attempts.

required
Source code in src/python/ensembl/io/genomio/assembly/download.py
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
def download_files(ftp_connection: FTP, accession: str, dl_dir: Path, max_redo: int) -> None:
    """Given an INSDC accession, download all available files from the FTP to the download dir.

    Args:
        ftp_connection: An open FTP connection object.
        accession: Genome assembly accession.
        dl_dir: Path to downloaded FTP files.
        max_redo: Maximum FTP connection retry attempts.
    """
    found = False
    # Get the list of assemblies for this accession
    for ftp_dir, _ in ftp_connection.mlsd():
        if not re.search(accession, ftp_dir):
            continue
        found = True
        ftp_connection.cwd(ftp_dir)

        # First, get the md5sum file
        md5_file = "md5checksums.txt"
        md5_path = dl_dir / md5_file
        with md5_path.open("wb") as fp:
            ftp_connection.retrbinary(f"RETR {md5_file}", fp.write)
        md5_sums = get_checksums(md5_path)

        # Get all the files with a known ending, skipping derived "_from_" files
        for ftp_file, _ in ftp_connection.mlsd():
            for end in _FILE_ENDS:
                if ftp_file.endswith(end) and not ftp_file.endswith(f"_from_{end}"):
                    _download_file(ftp_connection, ftp_file, md5_sums, dl_dir, max_redo)
    if not found:
        # Warn once if no directory matched; the previous version warned for
        # every non-matching sibling directory even when a match existed.
        logging.warning(f"Could not find accession '{accession}' in open FTP connection")

establish_ftp(ftp_conn, ftp_url, accession)

Return an FTP connection based on the provided accession and sub_dir.

Parameters:

Name Type Description Default
ftp_conn FTP

FTP class object.

required
ftp_url str

Specific FTP URL in connection request.

required
accession str

Genome accession required data for download.

required

Raises:

Type Description
UnsupportedFormatError

If accession does not follow INSDC's accession format.

Source code in src/python/ensembl/io/genomio/assembly/download.py
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
def establish_ftp(ftp_conn: FTP, ftp_url: str, accession: str) -> FTP:
    """Return an FTP connection based on the provided `accession` and `sub_dir`.

    Args:
        ftp_conn: FTP class object.
        ftp_url: Specific FTP URL in connection request.
        accession: Genome accession required data for download.

    Raises:
        UnsupportedFormatError: If `accession` does not follow INSDC's accession format.
    """

    accession_match = re.match(r"^(GC[AF])_([0-9]{3})([0-9]{3})([0-9]{3})(\.[0-9]+)?$", accession)
    if accession_match is None:
        raise UnsupportedFormatError(f"Could not recognize GCA accession format: {accession}")
    # The accession digits are split into triplets mirroring the FTP layout
    prefix, part1, part2, part3 = accession_match.group(1, 2, 3, 4)
    remote_dir = Path("genomes", "all", prefix, part1, part2, part3)

    # Open the connection and move into the accession-specific directory
    ftp_conn.connect(ftp_url)
    ftp_conn.login()
    ftp_conn.cwd(str(remote_dir))

    return ftp_conn

extract_assembly_metadata(assembly_reports)

Parse assembly reports and extract specific key information on status and related fields.

Parameters:

Name Type Description Default
assembly_reports dict[str, dict]

Key value pair of source name <> assembly report.

required

Returns:

Type Description
dict[str, ReportStructure]

Parsed assembly report meta (source, meta).

Source code in src/python/ensembl/io/genomio/assembly/status.py
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
def extract_assembly_metadata(assembly_reports: dict[str, dict]) -> dict[str, ReportStructure]:
    """Parse assembly reports and extract specific key information on status and related fields.

    Args:
        assembly_reports: Key value pair of source name <> assembly report.

    Returns:
        Parsed assembly report meta (source, meta).
    """
    parsed_meta = {}

    for source, asm_report in assembly_reports.items():
        assembly_info = asm_report["assembly_info"]
        organism = asm_report["organism"]

        report_meta = ReportStructure()

        # Mandatory meta key parsing:
        report_meta.accession = asm_report["accession"]
        report_meta.assembly_name = assembly_info["assembly_name"]
        report_meta.assembly_type = assembly_info["assembly_type"]
        report_meta.assembly_status = assembly_info["assembly_status"]
        report_meta.species_name = organism["organism_name"]
        report_meta.taxon_id = int(organism["tax_id"])

        ## Non-mandatory meta key parsing:
        # Genome notes are collapsed into one comma-separated string
        if "genome_notes" in assembly_info:
            report_meta.assembly_notes = ", ".join(assembly_info["genome_notes"])

        # Biosample record carries the last-updated timestamp
        if "biosample" in assembly_info:
            report_meta.last_updated = assembly_info["biosample"]["last_updated"]

        # Paired assembly accession, when present
        if "paired_assembly" in assembly_info:
            report_meta.paired_assembly = assembly_info["paired_assembly"]["accession"]

        # Isolate takes precedence over strain when both are present
        infraspecific = organism.get("infraspecific_names")
        if infraspecific is not None:
            if "isolate" in infraspecific:
                report_meta.strain = infraspecific["isolate"]
            elif "strain" in infraspecific:
                report_meta.strain = infraspecific["strain"]

        parsed_meta[source] = report_meta

    return parsed_meta

fetch_accessions_from_core_dbs(src_file, server_url)

Obtain the associated INSDC accession given a set of core database names and a database server URL.

The accession information is obtained from the meta table's meta key assembly.accession.

Parameters:

Name Type Description Default
src_file StrPath

File path with list of core database names.

required
server_url URL

Database server URL.

required

Returns:

Type Description
dict[str, str]

Dict of core database names (key) and their corresponding INSDC assembly accession (value).

Source code in src/python/ensembl/io/genomio/assembly/status.py
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
def fetch_accessions_from_core_dbs(src_file: StrPath, server_url: URL) -> dict[str, str]:
    """Obtain the associated INSDC accession given a set of core database names and a database server URL.

    The accession information is obtained from the `meta` table's meta key `assembly.accession`.

    Args:
        src_file: File path with list of core database names.
        server_url: Database server URL.

    Returns:
        Dict of core database names (key) and their corresponding INSDC assembly accession (value).
    """

    core_accn_meta = {}
    database_count = 0
    count_accn_found = 0

    with Path(src_file).open("r") as fin:
        for line in fin:
            core_db = line.strip()
            # Skip blank lines: previously they were counted and then used as
            # (empty) database names in the connection URL.
            if not core_db:
                continue
            database_count += 1
            db_connection_url = server_url.set(database=core_db)
            db_connection = DBConnection(db_connection_url)
            with db_connection.begin() as conn:
                query_result = conn.execute(
                    text('SELECT meta_value FROM meta WHERE meta_key = "assembly.accession";')
                ).fetchall()

            # Exactly one accession per core is expected; warn otherwise
            if not query_result:
                logging.warning(f"No accessions found in core: {core_db}")
            elif len(query_result) == 1:
                count_accn_found += 1
                asm_accession = query_result.pop()[0]
                logging.info(f"{core_db} -> assembly.accession[{asm_accession}]")
                core_accn_meta[core_db] = asm_accession
            else:
                logging.warning(f"Core {core_db} has {len(query_result)} assembly.accessions")

    logging.info(
        f"From initial input core databases ({database_count}), obtained ({count_accn_found}) accessions"
    )

    return core_accn_meta

fetch_datasets_reports(sif_image, assembly_accessions, download_directory, batch_size)

Obtain assembly reports in JSON format for each assembly accession via datasets CLI.

Parameters:

Name Type Description Default
sif_image Client

Instance of Client.loaded() singularity image.

required
assembly_accessions dict[str, str]

Dictionary of accession source <> assembly accessions pairs.

required
download_directory StrPath

Directory path to store assembly report JSON files.

required
batch_size int

Number of assembly accessions to batch submit to datasets.

required

Returns:

Type Description
dict[str, dict]

Dictionary of accession source and its associated assembly report.

Raises:

Type Description
ValueError

If result returned by datasets is not a string.

RuntimeError

If there was an error raised by datasets.

Source code in src/python/ensembl/io/genomio/assembly/status.py
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
def fetch_datasets_reports(
    sif_image: Client, assembly_accessions: dict[str, str], download_directory: StrPath, batch_size: int
) -> dict[str, dict]:
    """Obtain assembly reports in JSON format for each assembly accession via `datasets` CLI.

    Args:
        sif_image: Instance of `Client.loaded()` singularity image.
        assembly_accessions: Dictionary of accession source <> assembly accessions pairs.
        download_directory: Directory path to store assembly report JSON files.
        batch_size: Number of assembly accessions to batch submit to `datasets`.

    Returns:
        Dictionary of accession source and its associated assembly report.

    Raises:
        ValueError: If result returned by `datasets` is not a string.
        RuntimeError: If there was an error raised by `datasets`.
    """
    master_accn_list = list(assembly_accessions.values())
    combined_asm_reports: dict[str, dict] = {}

    # Split the accessions into batches of `batch_size` per datasets call
    accn_subsample = [
        master_accn_list[start : start + batch_size]
        for start in range(0, len(master_accn_list), batch_size)
    ]

    datasets_command = ["datasets", "summary", "genome", "accession"]
    for accessions in accn_subsample:
        # Make call to singularity datasets providing a multi-accession query
        client_return = Client.execute(
            image=sif_image, command=datasets_command + accessions, return_result=True, quiet=True
        )
        raw_result = client_return["message"]

        # A list means datasets produced a payload; a bare string means it
        # exited with a fatal error.
        result = raw_result[0] if isinstance(raw_result, list) else raw_result
        if not isinstance(result, str):
            raise ValueError("Result obtained from datasets is not a string")
        if re.search("^FATAL", result):
            raise RuntimeError(f"Singularity image execution failed! -> '{result.strip()}'")

        tmp_asm_dict = json.loads(result)
        if not tmp_asm_dict["total_count"]:
            logging.warning(f"No assembly report found for accession(s) {accessions}")
            continue

        logging.info(f"Assembly report obtained for accession(s) {accessions}")
        for assembly_report in tmp_asm_dict["reports"]:
            accession = assembly_report["accession"]
            asm_json_outfile = Path(download_directory, f"{accession}.asm_report.json")
            print_json(asm_json_outfile, assembly_report)
            # Save assembly report into source key<>report dict
            for src_key, accession_core in assembly_accessions.items():
                if accession == accession_core:
                    combined_asm_reports[src_key] = assembly_report

    return combined_asm_reports

generate_report_tsv(parsed_asm_reports, query_type, output_directory=Path(), outfile_name='AssemblyStatusReport')

Generate and write the assembly report to a TSV file.

Parameters:

Name Type Description Default
parsed_asm_reports dict[str, ReportStructure]

Parsed assembly report meta.

required
query_type str

Type of query (either core databases or accessions).

required
output_directory StrPath

Directory to store report TSV file.

Path()
outfile_name str

Name to give to the output TSV file.

'AssemblyStatusReport'
Source code in src/python/ensembl/io/genomio/assembly/status.py
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
def generate_report_tsv(
    parsed_asm_reports: dict[str, ReportStructure],
    query_type: str,
    output_directory: StrPath = Path(),
    outfile_name: str = "AssemblyStatusReport",
) -> None:
    """Generate and write the assembly report to a TSV file.

    Args:
        parsed_asm_reports: Parsed assembly report meta.
        query_type: Type of query (either core databases or accessions).
        output_directory: Directory to store report TSV file.
        outfile_name: Name to give to the output TSV file.
    """
    tsv_outfile = Path(output_directory, f"{outfile_name}.tsv")

    # Header row: the prettified query type, then the report's own columns
    first_report = next(iter(parsed_asm_reports.values()))
    header_list = [query_type.capitalize().replace("_", " ")] + first_report.header()

    with open(tsv_outfile, "w+") as tsv_out:
        writer = csv.writer(tsv_out, delimiter="\t", lineterminator="\n")
        writer.writerow(header_list)
        for source_name, report_meta in parsed_asm_reports.items():
            writer.writerow([source_name] + report_meta.values())

get_assembly_accessions(src_file)

Returns the list of assembly accessions found in the provided file.

Parameters:

Name Type Description Default
src_file StrPath

Path to file with one line per INSDC assembly accession.

required

Raises:

Type Description
UnsupportedFormatError

If an accession does not match the INSDC assembly accession format.

Source code in src/python/ensembl/io/genomio/assembly/status.py
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
def get_assembly_accessions(src_file: StrPath) -> list[str]:
    """Returns the list of assembly accessions found in the provided file.

    Args:
        src_file: Path to file with one line per INSDC assembly accession.

    Raises:
        UnsupportedFormatError: If an accession does not match the INSDC assembly accession format.
    """
    # INSDC assembly accession: GCA/GCF prefix, nine digits, dotted version
    accession_pattern = re.compile(r"^GC[AF]_[0-9]{9}\.[1-9][0-9]*$")
    query_accessions: list[str] = []
    with Path(src_file).open(mode="r") as fin:
        for raw_line in fin:
            accession = raw_line.strip()
            if accession_pattern.match(accession) is None:
                raise UnsupportedFormatError(f"Could not recognize GCA/GCF accession format: {accession}")
            query_accessions.append(accession)
    return query_accessions

get_checksums(checksum_path)

Get a dict of checksums from a file, with file names as keys and sums as values

Parameters:

Name Type Description Default
checksum_path Path

Path location to MD5 checksum file.

required
Source code in src/python/ensembl/io/genomio/assembly/download.py
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
def get_checksums(checksum_path: Path) -> Dict[str, str]:
    """Get a dict of checksums from a file, with file names as keys and sums as values.

    Args:
        checksum_path: Path location to MD5 checksum file.
    """
    sums: Dict[str, str] = {}
    if not checksum_path.is_file():
        return sums
    with checksum_path.open(mode="r") as checksums_file:
        for row in checksums_file:
            checksum, file_path = row.strip().split("  ")
            # Drop the leading "./" prefix; keep only top-level files
            file_path = file_path[2:]
            if "/" not in file_path:
                sums[file_path] = checksum
    return sums

get_files_selection(dl_dir)

Returns a dictionary with the relevant downloaded files classified.

Parameters:

Name Type Description Default
dl_dir Path

Local path to downloaded FTP files.

required

Returns:

Type Description
Dict[str, str]

Dictionary of file type (e.g."report") as keys and the relative file path (from dl_dir) as values.

Raises:

Type Description
FileDownloadError

If dl_dir tree does not include a file named *_assembly_report.txt.

Source code in src/python/ensembl/io/genomio/assembly/download.py
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
def get_files_selection(dl_dir: Path) -> Dict[str, str]:
    """Returns a dictionary with the relevant downloaded files classified.

    Args:
        dl_dir: Local path to downloaded FTP files.

    Returns:
        Dictionary of file type (e.g.`"report"`) as keys and the relative file path (from `dl_dir`) as values.

    Raises:
        FileDownloadError: If `dl_dir` tree does not include a file named `*_assembly_report.txt`.
    """
    root_name = get_root_name(dl_dir)
    if root_name == "":
        raise FileDownloadError(f"Could not determine the files root name in {dl_dir}")
    selected_files = {}
    for candidate in dl_dir.iterdir():
        file_name = candidate.name
        for end, file_type in _FILE_ENDS.items():
            # Accept a file that carries the shared root name, or one that simply
            # matches the ending (skipping derived "_from_" files)
            matches_end = file_name.endswith(end) and not file_name.endswith(f"_from_{end}")
            if (root_name and file_name == root_name + end) or matches_end:
                selected_files[file_type] = str(candidate)
    return selected_files

get_root_name(dl_dir)

Returns the root name, i.e. shared files basename prefix, using the assembly report file as base.

Parameters:

Name Type Description Default
dl_dir Path

Path location of downloaded FTP files.

required
Source code in src/python/ensembl/io/genomio/assembly/download.py
266
267
268
269
270
271
272
273
274
275
276
277
278
def get_root_name(dl_dir: Path) -> str:
    """Returns the root name, i.e. shared files basename prefix, using the assembly report file as base.

    Args:
        dl_dir: Path location of downloaded FTP files.
    """
    root_name = ""
    for dl_file in dl_dir.iterdir():
        # Escape the dot: the previous pattern "assembly_report.txt" let "."
        # match any character, so unrelated names could match.
        matches = re.search(r"^(.+_)assembly_report\.txt", dl_file.name)
        if matches:
            root_name = matches.group(1)
            break
    return root_name

md5_files(dl_dir, md5_path=None, md5_filename='md5checksums.txt')

Check all file checksums against the sums listed in a checksum file, if available. Returns False if there is no checksum file, if a file is missing, or if a file has a wrong checksum.

Parameters:

Name Type Description Default
dl_dir Path

Path location to containing downloaded FTP files.

required
md5_path Optional[Path]

Full path to an MD5 checksum file.

None
md5_filename str

Name of a checksum file in the dl_dir (used if no md5_path is given).

'md5checksums.txt'
Source code in src/python/ensembl/io/genomio/assembly/download.py
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
def md5_files(dl_dir: Path, md5_path: Optional[Path] = None, md5_filename: str = "md5checksums.txt") -> bool:
    """Check all files checksums with the sums listed in a checksum file, if available.

    Return False if there is no checksum file, or a file is missing, or has a wrong checksum.

    Args:
        dl_dir: Path location to containing downloaded FTP files.
        md5_path: Full path to an MD5 checksum file.
        md5_filename: Name of a checksum file in the `dl_dir` (used if no `md5_path` is given).
    """
    # Fall back to the default checksum file inside the download dir
    if md5_path is None:
        md5_path = dl_dir / md5_filename

    sums = get_checksums(md5_path)
    if not sums:
        return False
    logging.info(f" File sums from {md5_path}: {len(sums)}")
    for dl_file, expected_sum in sums.items():
        for end in _FILE_ENDS:
            # Only verify files with a known ending, skipping derived "_from_" files
            if not dl_file.endswith(end) or dl_file.endswith(f"_from_{end}"):
                continue
            file_path = dl_dir / dl_file
            if not file_path.is_file():
                logging.warning(f" No file {file_path} found")
                return False
            # Compare the recorded checksum against the actual file content
            with file_path.open(mode="rb") as f:
                actual_sum = hashlib.md5(f.read()).hexdigest()
            if actual_sum != expected_sum:
                logging.warning(f" File {file_path} checksum doesn't match")
                return False
            logging.info(f" File checksum ok {file_path}")
    logging.info(" All checksums OK")
    return True

retrieve_assembly_data(accession, download_dir, max_increment=0, max_redo=3)

Establishes an FTP connection and downloads a predefined subset of assembly data files from either INSDC or RefSeq.

Parameters:

Name Type Description Default
accession str

Genome assembly accession.

required
download_dir PathLike

Path to where to download FTP files.

required
max_increment int

If you want to allow assembly versions.

0
max_redo int

Maximum FTP connection retry attempts.

3

Raises:

Type Description
FileDownloadError

If no files are downloaded or if any does not match its MD5 checksum.

Source code in src/python/ensembl/io/genomio/assembly/download.py
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
def retrieve_assembly_data(
    accession: str,
    download_dir: PathLike,
    max_increment: int = 0,
    max_redo: int = 3,
) -> None:
    """Establishes an FTP connection and downloads a predefined subset of assembly data files from either
    INSDC or RefSeq.

    Args:
        accession: Genome assembly accession.
        download_dir: Path to where to download FTP files.
        max_increment: If you want to allow assembly versions.
        max_redo: Maximum FTP connection retry attempts.

    Raises:
        FileDownloadError: If no files are downloaded or if any does not match its MD5 checksum.
    """
    download_dir = Path(download_dir)

    # Set and create dedicated dir for download
    download_dir.mkdir(parents=True, exist_ok=True)

    # Download if files don't exist or fail checksum
    if not md5_files(download_dir, None):
        logging.info(" Download the files")

        for increment in range(0, max_increment + 1):
            if increment > 0:
                logging.info(f" Increment accession version once from {accession}")
                # Bump the full version number after the dot. The previous code
                # only incremented the last digit, so a version ending in 9
                # broke (e.g. ".19" became ".110" instead of ".20").
                root, _, version = accession.rpartition(".")
                accession = f"{root}.{int(version) + 1}"
                download_dir.mkdir(parents=True, exist_ok=True)
            ftp_url = "ftp.ncbi.nlm.nih.gov"
            ftp_instance = FTP()
            open_ftp_connection = establish_ftp(ftp_instance, ftp_url, accession)
            download_files(open_ftp_connection, accession, download_dir, max_redo)

        if not md5_files(download_dir, None):
            raise FileDownloadError("Failed md5sum of downloaded files")

    # Select specific files and give them a name
    files = get_files_selection(download_dir)

    if len(files) == 0:
        raise FileDownloadError("No file downloaded")

singularity_image_setter(sif_cache_dir, datasets_version)

Parse ENV and User specified variables related to datasets singularity SIF container and define version and location of container.

Parameters:

Name Type Description Default
sif_cache_dir Path | None

Path to locate existing, or download new SIF container image.

required
datasets_version str | None

URL of singularity container (custom datasets version if desired).

required

Returns:

Type Description
Client

spython.main.client instance of singularity container image housing datasets.

Source code in src/python/ensembl/io/genomio/assembly/status.py
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
def singularity_image_setter(sif_cache_dir: Path | None, datasets_version: str | None) -> Client:
    """Parse ENV and User specified variables related to `datasets` singularity SIF
    container and define version and location of container.

    Args:
        sif_cache_dir: Path to locate existing, or download new SIF container image.
        datasets_version: URL of singularity container (custom `datasets` version if desired).

    Returns:
        `spython.main.client` instance of singularity container image housing `datasets`.
    """

    # Resolve the cache dir: user setting first, then the nextflow env var,
    # then the singularity default env var, finally the current directory.
    nxf_cache = os.environ.get("NXF_SINGULARITY_CACHEDIR")
    singularity_cache = os.environ.get("SINGULARITY_CACHEDIR")
    if sif_cache_dir and sif_cache_dir.is_dir():
        image_dl_path = sif_cache_dir
        logging.info(f"Using user-defined cache_dir: '{image_dl_path}'")
    elif nxf_cache:
        image_dl_path = Path(nxf_cache)
        logging.info(
            f"Using preferred nextflow singularity cache dir 'NXF_SINGULARITY_CACHEDIR': {image_dl_path}"
        )
    elif singularity_cache:
        image_dl_path = Path(singularity_cache)
        logging.info(
            f"Using the default singularity installation cache dir 'SINGULARITY_CACHEDIR': {image_dl_path}"
        )
    else:
        image_dl_path = Path()
        logging.warning(f"Unable to set singularity cache dir properly, using CWD {image_dl_path}")

    # Resolve the container URL: default version unless the user supplied one
    if datasets_version is None:
        container_url = DATASETS_SINGULARITY["datasets_version_url"]
        logging.info(f"Using default 'ncbi datasets' version '{container_url}'")
    else:
        container_url = datasets_version
        logging.info(f"Using user defined 'ncbi datasets' version '{container_url}'")

    # Pull or load pre-existing 'datasets' singularity container image.
    return Client.pull(container_url, stream=False, pull_folder=image_dl_path, quiet=True)