
Commit

Merge pull request #902 from deeptools/dev
Release 3.7.5
lldelisle authored Jun 28, 2024
2 parents 67a4f37 + 8ec5add commit c1e9e92
Showing 3 changed files with 57 additions and 1 deletion.
16 changes: 16 additions & 0 deletions docs/content/News.rst
@@ -1,6 +1,22 @@
News and Developments
=====================

Release 3.7.5
-------------
**June 2024**

- Update the version file.

Release 3.7.4
-------------
**24 April 2024**

- Allow chicAggregateStatistic.py to extract the aggregated data from the views.hdf based on differential.hdf or differential_target.bed. The BED may now have the target name in the 4th column; in that case, the aggregation is done per target (see the sketch after this list).
- Allow hicCorrectMatrix.py to write filtered-out regions to a BED file.
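
A minimal sketch of how the per-target aggregation could be invoked, assuming a hypothetical four-column target BED (chrom, start, end, target name); the flags mirror those used in the new test added below, while the file names and target names are placeholders:

from hicexplorer import chicAggregateStatistic

# Hypothetical 4-column target BED (targets_4col.bed):
# chr1    4487435     4487735     Sox17
# chr1    14300280    14300580    Eya1
# With a name in the 4th column, aggregation is done per target.
args = ("--interactionFile two_matrices.hdf5 "   # views/interaction file, as in the test
        "--targetFile targets_4col.bed "         # hypothetical 4-column BED
        "--outFileName aggregate.hdf5 "
        "-t 4").split()                          # thread count, placeholder value
chicAggregateStatistic.main(args)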

Thanks @pavanvidem

Warning: in this version the version file was not updated, so the tools report 3.7.3 as their version.
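
A quick way to check which version an installation actually reports, a minimal sketch assuming hicexplorer is importable:

from hicexplorer._version import __version__

# A 3.7.4 install prints '3.7.3' because _version.py was not bumped;
# a 3.7.5 install prints '3.7.5'.
print(__version__)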

Release 3.7.3
-------------
2 changes: 1 addition & 1 deletion hicexplorer/_version.py
@@ -2,4 +2,4 @@
# This file is originally generated from Git information by running 'setup.py
# version'. Distribution tarballs contain a pre-generated copy of this file.

__version__ = '3.7.3'
__version__ = '3.7.5'
40 changes: 40 additions & 0 deletions hicexplorer/test/general/test_chicAggregateStatistic.py
@@ -33,6 +33,46 @@ def are_files_equal(file1, file2, delta=2, skip=0):
return equal


def test_target_list():
    outfile_aggregate = NamedTemporaryFile(suffix='.hdf5', delete=False)
    outfile_aggregate.close()
    args = "--interactionFile {} --targetFile {} --outFileName {} \
           -t {}".format(ROOT + 'chicViewpoint/two_matrices.hdf5',
                         ROOT + 'chicAggregateStatistic/target_list_3col.bed',
                         outfile_aggregate.name, 10).split()
    chicAggregateStatistic.main(args)

    aggregateFileH5Object = h5py.File(outfile_aggregate.name, 'r')
    assert 'FL-E13-5_chr1_MB-E10-5_chr1' in aggregateFileH5Object
    assert 'FL-E13-5_chr1' in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']
    assert 'MB-E10-5_chr1' in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']

    assert 'genes' in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['FL-E13-5_chr1']
    assert 'genes' in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['MB-E10-5_chr1']
    assert len(aggregateFileH5Object) == 1
    assert aggregateFileH5Object.attrs['type'] == 'aggregate'

    for chromosome in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['FL-E13-5_chr1']:

        assert len(aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['FL-E13-5_chr1'][chromosome]) == 3

        for gene in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['FL-E13-5_chr1'][chromosome]:
            assert len(aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['FL-E13-5_chr1'][chromosome][gene]) == 7
            for data in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['FL-E13-5_chr1'][chromosome][gene]:
                assert data in ['chromosome', 'end_list', 'gene_name', 'raw_target_list', 'relative_distance_list', 'start_list', 'sum_of_interactions']

    for chromosome in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['MB-E10-5_chr1']:

        assert len(aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['MB-E10-5_chr1'][chromosome]) == 3

        for gene in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['MB-E10-5_chr1'][chromosome]:
            assert len(aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['MB-E10-5_chr1'][chromosome][gene]) == 7
            for data in aggregateFileH5Object['FL-E13-5_chr1_MB-E10-5_chr1']['MB-E10-5_chr1'][chromosome][gene]:
                assert data in ['chromosome', 'end_list', 'gene_name', 'raw_target_list', 'relative_distance_list', 'start_list', 'sum_of_interactions']

    aggregateFileH5Object.close()
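
For reference, a minimal sketch of how an aggregated HDF5 file like the one written above could be inspected outside the test; the output path is hypothetical and the layout is assumed to match the assertions in test_target_list:

import h5py

with h5py.File('aggregate.hdf5', 'r') as f:        # hypothetical output file
    print('type attribute:', f.attrs.get('type'))  # expected: 'aggregate'
    # Walk every group and dataset, e.g.
    # FL-E13-5_chr1_MB-E10-5_chr1/FL-E13-5_chr1/<chromosome>/<gene>/...
    f.visititems(lambda name, obj: print(name, type(obj).__name__))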


def test_regular_mode():
    outfile_aggregate = NamedTemporaryFile(suffix='.hdf5', delete=False)
    outfile_aggregate.close()
