From 34b2974e4bc7ee6140c627603237bee5a7408aea Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 29 Jul 2016 20:44:14 -0700 Subject: [PATCH 01/56] work in progress --- quilt.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/quilt.py b/quilt.py index 46791da..2ba5bc3 100644 --- a/quilt.py +++ b/quilt.py @@ -142,6 +142,13 @@ def order_by(self, fields): def search(self, term): self._search = term + def commit(self, message): + data = { + response = requests.post("%s/tables/%s/commit/" % (self.connection.url, self.id), + data = json.dumps(data), + headers=HEADERS, + auth=self.connection.auth) + def next(self): try: From 683047d2bae12b0fc63f6f59a9532e806fc4337b Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Mon, 1 Aug 2016 13:17:57 -0700 Subject: [PATCH 02/56] Commit history and checkout API MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit List a table’s commit history and check out a particular commit. --- quilt.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/quilt.py b/quilt.py index 565f5d9..0fd4ae9 100644 --- a/quilt.py +++ b/quilt.py @@ -185,13 +185,35 @@ def order_by(self, fields): def search(self, term): self._search = term + @property + def commits(self): + response = requests.get("%s/data/%s/commits/" % (self.connection.url, self.id), + headers=HEADERS, + auth=self.connection.auth) + if response.status_code == requests.codes.ok: + return response.json() + else: + print "Oops, something went wrong." + return response + def commit(self, message): - data = { - response = requests.post("%s/tables/%s/commit/" % (self.connection.url, self.id), + data = {'message' : message} + response = requests.post("%s/data/%s/commits/" % (self.connection.url, self.id), data = json.dumps(data), headers=HEADERS, auth=self.connection.auth) + def checkout(self, commit): + data = {} + response = requests.post("%s/data/%s/commits/%s/checkout/" % (self.connection.url, self.id, commit), + data = json.dumps(data), + headers=HEADERS, + auth=self.connection.auth) + if response.status_code == requests.codes.ok: + self.__iter__() + else: + print response.text + def next(self): try: From 1fa650b894b5fefcbf54057b9016ab2d4677cd2b Mon Sep 17 00:00:00 2001 From: brennv Date: Wed, 24 Aug 2016 16:13:10 -0700 Subject: [PATCH 03/56] convert to rest --- README.md | 309 ----------------------------------------------------- README.rst | 167 +++++++++++++++++++++++++++++ 2 files changed, 167 insertions(+), 309 deletions(-) delete mode 100644 README.md create mode 100644 README.rst diff --git a/README.md b/README.md deleted file mode 100644 index 545bfcd..0000000 --- a/README.md +++ /dev/null @@ -1,309 +0,0 @@ -# Convert files to live data sets on Quilt -## Optional prep (your steps may vary) -1. Get a list of files you want to upload (see `get-files-to-upload/`) -2. Download the files in the list (see `curl-all.py`) -3. Unzip downloaded files (if needed) -```bash -cd downloads -gunzip *.gz -``` -## Upload to Quilt -4. Use `data_set.py` to create individual data sets (see `python data_set.py --help`). -You will need a Quilt username and password. Or use `batch.py` to create multiple data sets. 
-```bash -python data_set.py - -e https://quiltdata.com - -u USERNAME - -n "ENCODE data" - -d "#A549 #histone peak data #hg19" - -f downloads/wgEncodeBroadHistoneNhaH3k36me3StdPk.broadPeak -``` - -## File formats in this example -* [ENCDOE broadPeak format](https://genome.ucsc.edu/FAQ/FAQformat.html#format13) - -## Resources -* [ENCODE Project](https://www.encodeproject.org/) - - -# REST API - -| Action | Endpoint | Details | -|--------|----------|-------------| -| New table | `POST /tables/` | [See below](#create-table) | -| Delete table | `DELETE /tables/TABLE_ID/` | [See below](#delete-table) | -| Update table meta-data | `PATCH /tables/TABLE_ID` | [See below](#update-table-meta-data) | -| Add column to table | `POST /tables/TABLE_ID/columns/` | [See below](#add-column-to-table) | -| Append row to table | `POST /data/TABLE_ID/rows/` | [See below](#append-row) | -| Get table rows | `GET /data/TABLE_ID/rows` | [See below](#get-rows) | -| Get table row | `GET /data/TABLE_ID/rows/ROW_ID` | [See below](#get-row) | -| Genome intersect or subtract | `POST /genemath/` | [See below](#intersect-or-subtract) | - -Notes -* For all REST calls, the content-type is `application/JSON`. -* Description fields automatically linkify URLs and support `, , , , ` tags - - -## Tables -### Create table -`POST /tables/` -#### Data format -```javascript -{ - 'name': string, - 'description': text `, , , , ` tags supported; automatic linkification of URLs - 'columns': [ - { - 'name': string, - 'sqlname': optional string, - 'description': optional text, - 'type' : one of 'String', 'Number', 'Image', 'Text' - }, - ... - ] -} -``` - -#### Returns -Table data as JSON object, includes `id` field with the table's identifier. - -### Add column to table -`POST /tables/TABLE_ID/columns/` -#### Data format -```javascript -{ - 'name': string, - 'sqlname': optional string, - 'description': text, - 'type': one of 'String', 'Number', 'Image', or 'Text' -} -``` -#### Returns -Column data as JSON object, includes `id` field with the column's identifier. - - -### Delete table -`DELETE /tables/TABLE_ID` - -### Update table meta-data -`PATCH /tables/TABLE_ID` - -#### Data format -```javascript -{ - 'name': string, - 'description': text -} -``` - -## Table Data -* Use column `sqlname` as keys in input data - -### Append row -`POST /data/TABLE_ID/rows/` - -#### Data format -```javascript -[ - {columnSqlname0: value0, columnSqlname1 : value1, ... }, - ... -] -``` - -### Get rows -`GET /data/TABLE_ID/rows` -* Rows are keyed by the Quilt Row ID field `qrid` -* NOTE: Currently limited to the first 500 rows - -#### Returns -Row data as JSON object, keyed by column.sqlname. - -### Get row -`GET /data/TABLE_ID/rows/ROW_ID` - -#### Returns -Row data as JSON object, keyed by column.sqlname. - -## Quilt tables - -### Join -`POST /quilts/` -#### Data format -```javascript -{ - 'left_table_id': int, - 'right_table_id': int, - 'left_column_id': int, - 'right_column_id': int, - 'jointype': one of 'inner', 'leftOuter', 'firstMatch' -} -``` - -#### Returns -Quilt info as JSON object, includes `sqlname` field with the quilt's identifier. - -### Undo join -`DELETE /quilts/QUILT_SQLNAME` - - -## Genome Math -* Performs a gene math operation on two tables -* Creates a new table with the result. -* Columns are specified by `column.id`. 
- -### Intersect or subtract -`POST /genemath/` - -#### Data Format -```javascript -{ - 'operator': one of 'Intersect' or 'Subtract', - 'left_chr': integer (column id), - 'left_start': integer (column id), - 'left_end': integer (column id), - 'right_chr': integer (column id), - 'right_start': integer (column id), - 'right_end': integer (column id) -} -``` -#### Returns -JSON object representing the result table. - -# Python - -The Quilt Python connector uses the Quilt REST API and SQL Alchemy (http://docs.sqlalchemy.org/), -if installed, to access and update data sets in Quilt. Quilt tables are available as dictionaries -or Pandas (http://pandas.pydata.org/) DataFrames. - -## Connection - -To use the Quilt Python connector, add this repository to your PYTHONPATH -and import quilt. - -Connect to Quilt by creating a Connection object: - -```python -import quilt -connection = quilt.Connection(username) -Password: *enter your password* -``` - -The connection will contain a list of your Quilt tables: -```python -connection.tables -``` - -### Search for Data Sets -You can also find tables by searching your own tables and Quilt's public data sets -```python -connection.search('term') -``` - -### Get Table -Get a table by Table id using get_table: -```python -t = connection.get_table(1234) -``` - -### Create a New Table -Using the connection, you can create new tables in Quilt. To create an empty -table: -```python -t = connection.create_table(name, description) -``` - -To create a table from an input file: -```python -t = connection.create_table(name, description, inputfile=path_to_input_file) -``` - -Or, to create a new table from a DataFrame: -```python -t = connection.save_df(df, name, description="table description") -``` - -## Table - -Each Table object has a list of Columns -```python -mytable.columns -``` - -After the columns have been fetched, columns are available as table attributes. -```python -mytable.column1 -``` - -### Accessing Table Data - -Tables are iterable. To access table data: -```python -for row in mytable: - print row -``` - -#### Search -Search for matching rows in a table by calling search. -```python -for row in mytable.search('foo'): - print row -``` - -#### Order By -Sort the table by any column or set of columns. You can set the ordering by passing -a string that is the column's field (name in the database). - -```python -mytable.order_by('column1') -``` - -You can find column field names with their ".field" attribute: -```python -mytable.order_by(mytable.column1.field) -``` -You can sort by multiple columns by passing a list of fields. -```python -mytable.order_by(['column2', 'column1']) -``` -To sort in descending order, add a "-" in front of the column field name: -```python -mytable.order_by('-column1') -``` - -#### Limit -Limit the number of rows returned by calling limit(number_of_rows). - -#### Putting it all together -Search, order_by and limit can be combined to return just the data you -want to see. For example, to return the top 2 finishers with the name Sally -from a table of race results (race_results: [name_000, time_001]), you could write: - -```python -for result in race_results.search('Sally').order_by('-time_001').limit(2): - print row -``` - -### Pandas DataFrame - -Access a table's data as a Pandas DataFrame by calling -mytable.df() - -You can also combine the querying methods above to access particular rows. 
-race_results.search('Sally').order_by('-time_001').limit(2).df() - -### Gene Math - -Quilt supports intersect and subtract for tables that store genomic regions. Those -operations assume that tables have columns storing: Chromsome, start and end. The -function get_bed_cols tries to infer those columns based on column names. - -If the guessing fails, or to override the guess, set the chromosome, start, end -columns explicitly with set_bed_cols. -mytable.set_bed_cols(mytable.chr_001, mytable.start_002, mytable.end_003) - -Once the bed columns are set for both tables, they can be intersected and subtracted. -```python -result = tableA.intersect(tableB) -result = tableA.intersect_wao(tableB) -result = tableA.subtract(tableB) -``` diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..44d18dd --- /dev/null +++ b/README.rst @@ -0,0 +1,167 @@ +Convert files to live data sets on Quilt +======================================== + +Optional prep (your steps may vary) +----------------------------------- + +#. Get a list of files you want to upload (see ``get-files-to-upload/``) +#. Download the files in the list (see ``curl-all.py``) +#. Unzip downloaded files (if needed) + + .. code:: bash + + cd downloads + gunzip *.gz + + .. rubric:: Upload to Quilt + :name: upload-to-quilt + +#. | Use ``data_set.py`` to create individual data sets (see + ``python data_set.py --help``). + | You will need a Quilt username and password. Or use ``batch.py`` to + create multiple data sets. + + .. code:: bash + + python data_set.py + -e https://quiltdata.com + -u USERNAME + -n "ENCODE data" + -d "#A549 #histone peak data #hg19" + -f downloads/wgEncodeBroadHistoneNhaH3k36me3StdPk.broadPeak + +File formats in this example +---------------------------- + +- `ENCDOE broadPeak format`_ + +Resources +--------- + +- `ENCODE Project`_ + +REST API +======== + ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Action | Endpoint | Details | ++================================+======================================+===========================================+ +| New table | ``POST /tables/`` | `See below`_ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Delete table | ``DELETE /tables/TABLE_ID/`` | `See below <#delete-table>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Update table meta-data | ``PATCH /tables/TABLE_ID`` | `See below <#update-table-meta-data>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Add column to table | ``POST /tables/TABLE_ID/columns/`` | `See below <#add-column-to-table>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Append row to table | ``POST /data/TABLE_ID/rows/`` | `See below <#append-row>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Get table rows | ``GET /data/TABLE_ID/rows`` | `See below <#get-rows>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Get table row | ``GET /data/TABLE_ID/rows/ROW_ID`` | `See below <#get-row>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ 
+| Genome intersect or subtract | ``POST /genemath/`` | `See below <#intersect-or-subtract>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ + +Notes + +- For all REST calls, the content-type is ``application/JSON``. +- Description fields automatically linkify URLs and support + ``, , , , `` tags + +Tables +------ + +Create table +~~~~~~~~~~~~ + +``POST /tables/`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'name': string, + 'description': text `, , , , ` tags supported; automatic linkification of URLs + 'columns': [ + { + 'name': string, + 'sqlname': optional string, + 'description': optional text, + 'type' : one of 'String', 'Number', 'Image', 'Text' + }, + ... + ] + } + +Returns +^^^^^^^ + +Table data as JSON object, includes ``id`` field with the table’s +identifier. + +Add column to table +~~~~~~~~~~~~~~~~~~~ + +``POST /tables/TABLE_ID/columns/`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'name': string, + 'sqlname': optional string, + 'description': text, + 'type': one of 'String', 'Number', 'Image', or 'Text' + } + +Returns +^^^^^^^ + +Column data as JSON object, includes ``id`` field with the column’s +identifier. + +Delete table +~~~~~~~~~~~~ + +``DELETE /tables/TABLE_ID`` + +Update table meta-data +~~~~~~~~~~~~~~~~~~~~~~ + +``PATCH /tables/TABLE_ID`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'name': string, + 'description': text + } + +Table Data +---------- + +- Use column ``sqlname`` as keys in input data + +Append row +~~~~~~~~~~ + +``POST /data/TABLE_ID/rows/`` + +Data format +^^^^^^^^^^^ + +| \`\`\`javascript +| [ +| {columnSqlname0: value0, c + +.. _ENCDOE broadPeak format: https://genome.ucsc.edu/FAQ/FAQformat.html#format13 +.. _ENCODE Project: https://www.encodeproject.org/ +.. 
_See below: #create-table From 822466bba59e2f7930b2632416646e4274324522 Mon Sep 17 00:00:00 2001 From: brennv Date: Wed, 24 Aug 2016 16:13:18 -0700 Subject: [PATCH 04/56] add setup --- setup.cfg | 5 +++++ setup.py | 42 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 setup.cfg create mode 100644 setup.py diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..7469488 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,5 @@ +[metadata] +description-file = README.rst + +[bdist_wheel] +universal = 1 diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..375e428 --- /dev/null +++ b/setup.py @@ -0,0 +1,42 @@ +from setuptools import setup + + +def readme(): + with open('README.rst', 'r') as f: + return f.read() + + +setup( + name='quiltdata', + packages=['quiltdata'], + version='0.1.0', + description='Quilt Python API https://quiltdata.com', + long_description=readme(), + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + # for more options see https://pypi.python.org/pypi?%3Aaction=list_classifiers + ], + author='Kevin Moore', + author_email='???@???.com', # same as you regestered + license='???', # you will probably have to add a LICENSE.txt to the repo + url='https://github.com/quiltdata/API', + download_url='https://github.com/quiltdata/API/tarball/0.1.0', + keywords='quiltdata api social shareable data platform', + install_requires=[ + 'requests', + 'numpy', + 'pandas', + 'psycopg2', + 'sqlalchemy', + # probably more + ], + include_package_data=True, + zip_safe=False) From 8f368cd9aa2e67edc154f54277ad6cb3e4bc806d Mon Sep 17 00:00:00 2001 From: brennv Date: Wed, 24 Aug 2016 16:13:29 -0700 Subject: [PATCH 05/56] folder spelling --- {get-files-to-uplaod => get-files-to-upload}/README.md | 0 {get-files-to-uplaod => get-files-to-upload}/curl-all.py | 0 .../extract-matching-files.py | 0 .../wgEncodeBroadHistone-All.txt | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename {get-files-to-uplaod => get-files-to-upload}/README.md (100%) rename {get-files-to-uplaod => get-files-to-upload}/curl-all.py (100%) rename {get-files-to-uplaod => get-files-to-upload}/extract-matching-files.py (100%) rename {get-files-to-uplaod => get-files-to-upload}/wgEncodeBroadHistone-All.txt (100%) diff --git a/get-files-to-uplaod/README.md b/get-files-to-upload/README.md similarity index 100% rename from get-files-to-uplaod/README.md rename to get-files-to-upload/README.md diff --git a/get-files-to-uplaod/curl-all.py b/get-files-to-upload/curl-all.py similarity index 100% rename from get-files-to-uplaod/curl-all.py rename to get-files-to-upload/curl-all.py diff --git a/get-files-to-uplaod/extract-matching-files.py b/get-files-to-upload/extract-matching-files.py similarity index 100% rename from get-files-to-uplaod/extract-matching-files.py rename to get-files-to-upload/extract-matching-files.py diff --git a/get-files-to-uplaod/wgEncodeBroadHistone-All.txt b/get-files-to-upload/wgEncodeBroadHistone-All.txt similarity index 100% rename from get-files-to-uplaod/wgEncodeBroadHistone-All.txt rename to get-files-to-upload/wgEncodeBroadHistone-All.txt From 890536b2129aca5b1ce77fb1648b4aa6b71302e6 Mon Sep 17 00:00:00 2001 From: Kevin Moore 
Date: Thu, 25 Aug 2016 12:03:33 -0700 Subject: [PATCH 06/56] Clean up README --- README.rst | 280 +++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 274 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index 44d18dd..d8b197b 100644 --- a/README.rst +++ b/README.rst @@ -158,10 +158,278 @@ Append row Data format ^^^^^^^^^^^ -| \`\`\`javascript -| [ -| {columnSqlname0: value0, c +.. code:: javascript + + [ + {columnSqlname0: value0, columnSqlname1 : value1, ... }, + ... + ] + +Get rows +~~~~~~~~ + +``GET /data/TABLE_ID/rows`` \* Rows are keyed by the Quilt Row ID field +``qrid`` \* NOTE: Currently limited to the first 500 rows + +Returns +^^^^^^^ + +Row data as JSON object, keyed by column.sqlname. + +Get row +~~~~~~~ + +``GET /data/TABLE_ID/rows/ROW_ID`` + +Returns +^^^^^^^ + +Row data as JSON object, keyed by column.sqlname. + +Quilt tables +------------ + +Join +~~~~ + +``POST /quilts/`` #### Data format + +.. code:: javascript + + { + 'left_table_id': int, + 'right_table_id': int, + 'left_column_id': int, + 'right_column_id': int, + 'jointype': one of 'inner', 'leftOuter', 'firstMatch' + } + +Returns +^^^^^^^ + +Quilt info as JSON object, includes ``sqlname`` field with the quilt’s +identifier. + +Undo join +~~~~~~~~~ + +``DELETE /quilts/QUILT_SQLNAME`` + +Genome Math +----------- + +- Performs a gene math operation on two tables +- Creates a new table with the result. +- Columns are specified by ``column.id``. + +Intersect or subtract +~~~~~~~~~~~~~~~~~~~~~ + +``POST /genemath/`` + +Data Format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'operator': one of 'Intersect' or 'Subtract', + 'left_chr': integer (column id), + 'left_start': integer (column id), + 'left_end': integer (column id), + 'right_chr': integer (column id), + 'right_start': integer (column id), + 'right_end': integer (column id) + } + +Returns +^^^^^^^ + +JSON object representing the result table. + +Python +====== + +The Quilt Python connector uses the Quilt REST API and SQL Alchemy +(http://docs.sqlalchemy.org/), if installed, to access and update data +sets in Quilt. Quilt tables are available as dictionaries or Pandas +(http://pandas.pydata.org/) DataFrames. + +Connection +---------- + +To use the Quilt Python connector, add this repository to your +PYTHONPATH and import quilt. + +Connect to Quilt by creating a Connection object: + +.. code:: python + + import quilt + connection = quilt.Connection(username) + Password: *enter your password* + +The connection will contain a list of your Quilt tables: + +.. code:: python + + connection.tables + +Search for Data Sets +~~~~~~~~~~~~~~~~~~~~ + +You can also find tables by searching your own tables and Quilt’s public +data sets + +.. code:: python + + connection.search('term') + +Get Table +~~~~~~~~~ + +Get a table by Table id using get\_table: + +.. code:: python + + t = connection.get_table(1234) + +Create a New Table +~~~~~~~~~~~~~~~~~~ + +Using the connection, you can create new tables in Quilt. To create an +empty table: + +.. code:: python + + t = connection.create_table(name, description) + +To create a table from an input file: + +.. code:: python + + t = connection.create_table(name, description, inputfile=path_to_input_file) + +Or, to create a new table from a DataFrame: + +.. code:: python + + t = connection.save_df(df, name, description="table description") + + + +Table +----- + +Each Table object has a list of Columns + +.. 
code:: python
+
+    mytable.columns
+
+After the columns have been fetched, columns are available as table
+attributes.
+
+.. code:: python
+
+    mytable.column1
+
+Accessing Table Data
+~~~~~~~~~~~~~~~~~~~~
+
+Tables are iterable. To access table data:
+
+.. code:: python
+
+    for row in mytable:
+        print row
+
+Search
+^^^^^^
+
+Search for matching rows in a table by calling search.
+
+.. code:: python
+
+    for row in mytable.search('foo'):
+        print row
+
+Order By
+^^^^^^^^
+
+Sort the table by any column or set of columns. You can set the ordering
+by passing a string that is the column’s field (name in the database).
+
+.. code:: python
+
+    mytable.order_by('column1')
+
+You can find column field names with their “.field” attribute:
+
+.. code:: python
+
+    mytable.order_by(mytable.column1.field)
+
+You can sort by multiple columns by passing a list of fields.
+
+.. code:: python
+
+    mytable.order_by(['column2', 'column1'])
+
+To sort in descending order, add a “-” in front of the column field
+name:
+
+.. code:: python
+
+    mytable.order_by('-column1')
+
+Limit
+^^^^^
+
+Limit the number of rows returned by calling limit(number\_of\_rows).
+
+Putting it all together
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Search, order\_by and limit can be combined to return just the data you
+want to see. For example, to return the top 2 finishers with the name
+Sally from a table of race results (race\_results: [name\_000,
+time\_001]), you could write:
+
+.. code:: python
+
+    for result in race_results.search('Sally').order_by('-time_001').limit(2):
+        print result
+
+Pandas DataFrame
+~~~~~~~~~~~~~~~~
+
+Access a table’s data as a Pandas DataFrame by calling mytable.df()
+
+You can also combine the querying methods above to access particular
+rows.
+.. code:: python
+
+    race_results.search('Sally').order_by('-time\_001').limit(2).df()
+
+Gene Math
+~~~~~~~~~
+
+Quilt supports intersect and subtract for tables that store genomic
+regions. Those operations assume that tables have columns storing:
+Chromosome, start and end. The function get\_bed\_cols tries to infer
+those columns based on column names.
+
+If the guessing fails, or to override the guess, set the chromosome,
+start, end columns explicitly with set\_bed\_cols.
+mytable.set\_bed\_cols(mytable.chr\_001, mytable.start\_002,
+mytable.end\_003)
+
+Once the bed columns are set for both tables, they can be intersected
+and subtracted.
+
+.. code:: python
-.. _ENCDOE broadPeak format: https://genome.ucsc.edu/FAQ/FAQformat.html#format13
-.. _ENCODE Project: https://www.encodeproject.org/
-.. _See below: #create-table
+    result = tableA.intersect(tableB)
+    result = tableA.intersect_wao(tableB)
+    result = tableA.subtract(tableB)

From e97c7f7302427c13df41799f7d404d7d30a21ad6 Mon Sep 17 00:00:00 2001
From: Kevin Moore
Date: Thu, 25 Aug 2016 12:06:27 -0700
Subject: [PATCH 07/56] Code example not showing up right in README

---
 README.rst | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/README.rst b/README.rst
index d8b197b..25d5974 100644
--- a/README.rst
+++ b/README.rst
@@ -408,9 +408,10 @@ Access a table’s data as a Pandas DataFrame by calling mytable.df()
 
 You can also combine the querying methods above to access particular
 rows.
+
 .. code:: python
 
-    race_results.search('Sally').order_by('-time\_001').limit(2).df()
+    race_results.search('Sally').order_by('-time_001').limit(2).df()
 
 Gene Math
 ~~~~~~~~~

From 199808dbc616fd03fa21c36b95c5eee17e943e07 Mon Sep 17 00:00:00 2001
From: Quilt Data
Date: Thu, 25 Aug 2016 13:14:13 -0700
Subject: [PATCH 08/56] Create LICENSE.txt

---
 LICENSE.txt | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)
 create mode 100644 LICENSE.txt

diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..661fd1e
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 Quilt Data
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

From 95964fa04cd02a7a4328046e708a0b3927e83d3d Mon Sep 17 00:00:00 2001
From: Kevin Moore
Date: Thu, 25 Aug 2016 13:22:56 -0700
Subject: [PATCH 09/56] Clean up setup for PyPI

---
 setup.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/setup.py b/setup.py
index 375e428..4444eb0 100644
--- a/setup.py
+++ b/setup.py
@@ -7,8 +7,8 @@ def readme():
 
 
 setup(
-    name='quiltdata',
-    packages=['quiltdata'],
+    name='quilt',
+    packages=['quilt'],
     version='0.1.0',
     description='Quilt Python API https://quiltdata.com',
     long_description=readme(),
@@ -24,9 +24,9 @@ def readme():
         'Programming Language :: Python :: 3.5',
         # for more options see https://pypi.python.org/pypi?%3Aaction=list_classifiers
     ],
-    author='Kevin Moore',
-    author_email='???@???.com', # same as you regestered
-    license='???', # you will probably have to add a LICENSE.txt to the repo
+    author='quiltdata',
+    author_email='founders@quiltdata.io', # same as you registered
+    license='LICENSE.txt', # you will probably have to add a LICENSE.txt to the repo
     url='https://github.com/quiltdata/API',
     download_url='https://github.com/quiltdata/API/tarball/0.1.0',
     keywords='quiltdata api social shareable data platform',
@@ -36,7 +36,6 @@ def readme():
         'pandas',
         'psycopg2',
         'sqlalchemy',
-        # probably more
     ],
     include_package_data=True,
     zip_safe=False)

From 15a81068d97d190fd59c353a486b9ae9e946b473 Mon Sep 17 00:00:00 2001
From: Kevin Moore
Date: Thu, 25 Aug 2016 14:14:23 -0700
Subject: [PATCH 10/56] Convert python connector from file to package

Created a quilt package for code clarity and compatibility with pip.
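
For illustration, the intended public surface is unchanged by the move
(a sketch based on the re-exports in quilt/__init__.py below; the
username and table id are made-up examples):

    import quilt

    connection = quilt.Connection("username")  # prompts for a password
    table = connection.get_table(1234)         # returns a quilt.Table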
--- quilt/__init__.py | 18 +++ quilt/connection.py | 207 +++++++++++++++++++++++++ quilt/lib.py | 19 +++ quilt.py => quilt/table.py | 308 ++----------------------------------- quilt/util.py | 58 +++++++ 5 files changed, 316 insertions(+), 294 deletions(-) create mode 100644 quilt/__init__.py create mode 100644 quilt/connection.py create mode 100644 quilt/lib.py rename quilt.py => quilt/table.py (57%) create mode 100644 quilt/util.py diff --git a/quilt/__init__.py b/quilt/__init__.py new file mode 100644 index 0000000..423431e --- /dev/null +++ b/quilt/__init__.py @@ -0,0 +1,18 @@ + +from .util import File, Quilt +from .table import Table, Column +from .connection import Connection + +try: + import pandas + PANDAS = True +except: + PANDAS = False + +try: + import psycopg2 + import sqlalchemy + SQLALCHEMY = True +except: + SQLALCHEMY = False + diff --git a/quilt/connection.py b/quilt/connection.py new file mode 100644 index 0000000..5eb9f28 --- /dev/null +++ b/quilt/connection.py @@ -0,0 +1,207 @@ +import json +import getpass +import requests +import sys + +from mimetypes import MimeTypes +from multiprocessing import Pool + +from .lib import * +from .table import * + +class Connection(object): + + def __init__(self, username, url=QUILT_URL): + self.url = url + self.username = username + self.password = getpass.getpass() + self.auth = requests.auth.HTTPBasicAuth(self.username, self.password) + self.status_code = None + self.userid = None + self._tables = None + self._files = None + self._pool = None + self._sqlengine = None + response = requests.get("%s/users/%s/" % (self.url, username), + headers=HEADERS, + auth=requests.auth.HTTPBasicAuth(self.username, self.password)) + self.status_code = response.status_code + if response.status_code == requests.codes.ok: + userdata = response.json() + self._tables = [Table(self, d) for d in userdata['tables']] + self.userid = userdata['id'] + self.profile = userdata['profile'] + if SQLALCHEMY: + self._sqlengine = sa.create_engine(self.profile.get('odbc').get('url')) + else: + print "Login Failed. Please check your credentials and try again." + + def __del__(self): + if self._pool: + self._pool.close() + self._pool.join() + + def get_thread_pool(self): + if not self._pool: + self._pool = Pool(processes=8) + return self._pool + + def search(self, search): + matches = [] + if isinstance(search, list): + terms = search + else: + terms = [search] + + params = {'search' : terms} + response = requests.get("%s/tables/" % (self.url), + headers=HEADERS, + params=params, + auth=self.auth) + if response.status_code == 200: + data = response.json() + matches = [Table(self, d) for d in data] + else: + print "Oops, something went wrong." + print "response=%s" % response.status_code + + return matches + + @property + def tables(self): + if not self._tables: + response = requests.get("%s/users/%s/" % (self.url, self.username), + headers=HEADERS, + auth=requests.auth.HTTPBasicAuth(self.username, self.password)) + self.status_code = response.status_code + if response.status_code == requests.codes.ok: + userdata = response.json() + self._tables = [Table(self, d) for d in userdata['tables']] + else: + print "Oops, something went wrong." 
+ print "response=%s" % response.status_code + self._tables = [] + return self._tables + + @property + def files(self): + if not self._files: + response = requests.get("%s/files/" % (self.url), + headers=HEADERS, + auth=requests.auth.HTTPBasicAuth(self.username, self.password)) + self.status_code = response.status_code + if response.status_code == requests.codes.ok: + filedata = response.json() + print filedata + self._filedata = filedata + self._files = [File(self, d) for d in filedata['results']] + else: + print "Oops, something went wrong." + print "response=%s" % response.status_code + self._files = [] + return self._files + + def get_table(self, table_id): + response = requests.get("%s/tables/%s/" % (self.url, table_id), + headers=HEADERS, + auth=self.auth) + + if response.status_code == requests.codes.ok: + return Table(self, response.json()) + else: + print "Oops, something went wrong." + print response.text + return None + + def create_table(self, name, description=None, columns=None, inputfile=None): + data = { 'name' : name } + if description: + data['description'] = description + if inputfile: + if columns: + print "Please specify either a set of columns or an input file, not both" + return None + + if isinstance(inputfile, File): + data['csvfile'] = inputfile.fullpath + else: + f = self.upload(inputfile) + data['csvfile'] = f.fullpath + elif columns: + data['columns'] = columns + + response = requests.post("%s/tables/" % self.url, + data = json.dumps(data), + headers=HEADERS, + auth=self.auth) + + if response.status_code == requests.codes.ok: + return Table(self, response.json()) + else: + print response.text + return response.text + + def save_df(self, df, name, description=None): + type_map = { 'object' : 'String', + 'float16' : 'Number', + 'float32' : 'Number', + 'float64' : 'Number', + 'int8' : 'Number', + 'int16' : 'Number', + 'int32' : 'Number', + 'int64' : 'Number', + 'unicode' : 'String' } + + if not PANDAS: + print "Install pandas to use DataFrames: http://pandas.pydata.org/" + return None + + schema = { 'name' : name, 'columns' : [] } + if description: + schema['description'] = description + + for i, col in enumerate(df.columns): + dt = df.dtypes[i] + ctype = type_map.get(str(dt), None) + if not ctype: + print "Oops, unrecognized type %s in Data Frame" % dt + return None + + schema['columns'].append({'name' : col, 'type' : ctype }) + + response = requests.post("%s/tables/" % self.url, + data = json.dumps(schema), + headers=HEADERS, + auth=self.auth) + + if response.status_code == requests.codes.ok: + table = Table(self, response.json()) + else: + print response.text + return None + + response = table.create(df.to_dict('records').values()) + if response.status_code != requests.codes.ok: + print "Oops, something went wrong." 
+ print response.text + + return table + + def upload(self, filepath): + filename = filepath.split('/')[-1] + mime = MimeTypes() + mime_type = mime.guess_type(filename) + data = { 'filename' : filename, 'mime_type' : mime_type } + response = requests.post("%s/files/" % self.url, + data = json.dumps(data), + headers=HEADERS, + auth=self.auth) + + if response.status_code == requests.codes.created: + f = File(self, response.json()) + with open(filepath, 'rb') as localfile: + response = requests.put(f.upload_url, + data=localfile) + return f + else: + print response.text diff --git a/quilt/lib.py b/quilt/lib.py new file mode 100644 index 0000000..a1088da --- /dev/null +++ b/quilt/lib.py @@ -0,0 +1,19 @@ +import json +import requests +import sys + +HEADERS = {"Content-Type": "application/json", "Accept": "application/json"} +QUILT_URL = 'https://quiltdata.com' + +try: + import pandas as pd + PANDAS = True +except: + PANDAS = False + +try: + import psycopg2 + import sqlalchemy as sa + SQLALCHEMY = True +except: + SQLALCHEMY = False diff --git a/quilt.py b/quilt/table.py similarity index 57% rename from quilt.py rename to quilt/table.py index df587eb..a6a249f 100644 --- a/quilt.py +++ b/quilt/table.py @@ -1,26 +1,5 @@ -import json -import getpass -import requests -import sys - -try: - import pandas - PANDAS = True -except: - PANDAS = False - -try: - import psycopg2 - import sqlalchemy - SQLALCHEMY = True -except: - SQLALCHEMY = False - -from mimetypes import MimeTypes -from multiprocessing import Pool - -HEADERS = {"Content-Type": "application/json", "Accept": "application/json"} -QUILT_URL = 'https://quiltdata.com' + +from lib import * def make_post_request(url, data, auth): response = None @@ -40,65 +19,6 @@ def rowgen(buffer): for row in buffer: yield row -class File(object): - def __init__(self, connection, data): - self._data = data - self.connection = connection - self.id = data['id'] - self.owner = data['owner'] - self.filename = data['filename'] - self.fullpath = data['fullpath'] - self.url = data['url'] - self.creds = data['s3creds'] - self.upload_url = data['upload_url'] - self.status = data['status'] - - def refresh(self): - response = requests.get("%s/files/%s/" % (self.connection.url, self.id), - headers=HEADERS, - auth=self.connection.auth) - if response.status_code == requests.codes.ok: - self.__init__(self.connection, response.json()) - else: - print "Oops, something went wrong." 
- print response.status_code - return response - - def download(self): - url = self.url - outfile = self.filename - - r = requests.get(url, stream=True) - with open(outfile, 'wb') as f: - for chunk in r.iter_content(chunk_size=1024): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - return outfile - - -class Quilt(object): - def __init__(self, table, data): - self.table = table - self.id = data['sqlname'] - self.left_column = data['left_column'] - self.right_column = data['right_column'] - self.jointype = data['jointype'] - self.right_table = data['right_table_name'] - - def delete(self): - if not self.id: - return requests.codes.not_found - - connection = self.table.connection - response = requests.delete("%s/quilts/%s/" % (connection.url, self.id), - headers=HEADERS, - auth=connection.auth) - if response.status_code == requests.codes.no_content: - self.table._quilts = None - self.id = None - return response.status_code - - class Column(object): def __init__(self, table, id): self.table = table @@ -118,6 +38,7 @@ def __str__(self): def __repr__(self): return "" % (self.table.sqlname, self.field) + class Table(object): _schema = None _quilts = None @@ -261,17 +182,17 @@ def df(self): if self.connection._sqlengine and self._search is None: type_map = { - 'String' : sqlalchemy.String, - 'Number' : sqlalchemy.Float, - 'Text' : sqlalchemy.Text, - 'Date' : sqlalchemy.Date, - 'DateTime' : sqlalchemy.DateTime, - 'Image' : sqlalchemy.String } + 'String' : sa.String, + 'Number' : sa.Float, + 'Text' : sa.Text, + 'Date' : sa.Date, + 'DateTime' : sa.DateTime, + 'Image' : sa.String } - columns = [sqlalchemy.Column(c.field, type_map[c.type]) for c in self.columns] - table = sqlalchemy.Table(self.sqlname, sqlalchemy.MetaData(),*columns) + columns = [sa.Column(c.field, type_map[c.type]) for c in self.columns] + table = sa.Table(self.sqlname, sa.MetaData(),*columns) - stmt = sqlalchemy.select([table]) + stmt = sa.select([table]) if self._ordering_fields: ordering_clause = [] for f in self._ordering_fields: @@ -285,7 +206,7 @@ def df(self): if self._limit is not None: stmt = stmt.limit(self._limit) - return pandas.read_sql(stmt, self.connection._sqlengine) + return pd.read_sql(stmt, self.connection._sqlengine) else: data = [] index = [] @@ -295,7 +216,7 @@ def df(self): break index.append(row['qrid']) data.append(row) - return pandas.DataFrame(data, columns=columns, index=index) + return pd.DataFrame(data, columns=columns, index=index) def __getitem__(self, qrid): response = requests.get("%s/data/%s/rows/%s" % (self.connection.url, self.id, qrid), @@ -491,204 +412,3 @@ def intersect(self, b): headers=HEADERS, auth=self.connection.auth) return response - - -class Connection(object): - - def __init__(self, username, url=QUILT_URL): - self.url = url - self.username = username - self.password = getpass.getpass() - self.auth = requests.auth.HTTPBasicAuth(self.username, self.password) - self.status_code = None - self.userid = None - self._tables = None - self._files = None - self._pool = None - self._sqlengine = None - response = requests.get("%s/users/%s/" % (self.url, username), - headers=HEADERS, - auth=requests.auth.HTTPBasicAuth(self.username, self.password)) - self.status_code = response.status_code - if response.status_code == requests.codes.ok: - userdata = response.json() - self._tables = [Table(self, d) for d in userdata['tables']] - self.userid = userdata['id'] - self.profile = userdata['profile'] - if SQLALCHEMY: - self._sqlengine = 
sqlalchemy.create_engine(self.profile.get('odbc').get('url')) - else: - print "Login Failed. Please check your credentials and try again." - - def __del__(self): - if self._pool: - self._pool.close() - self._pool.join() - - def get_thread_pool(self): - if not self._pool: - self._pool = Pool(processes=8) - return self._pool - - def search(self, search): - matches = [] - if isinstance(search, list): - terms = search - else: - terms = [search] - - params = {'search' : terms} - response = requests.get("%s/tables/" % (self.url), - headers=HEADERS, - params=params, - auth=self.auth) - if response.status_code == 200: - data = response.json() - matches = [Table(self, d) for d in data] - else: - print "Oops, something went wrong." - print "response=%s" % response.status_code - - return matches - - @property - def tables(self): - if not self._tables: - response = requests.get("%s/users/%s/" % (self.url, self.username), - headers=HEADERS, - auth=requests.auth.HTTPBasicAuth(self.username, self.password)) - self.status_code = response.status_code - if response.status_code == requests.codes.ok: - userdata = response.json() - self._tables = [Table(self, d) for d in userdata['tables']] - else: - print "Oops, something went wrong." - print "response=%s" % response.status_code - self._tables = [] - return self._tables - - @property - def files(self): - if not self._files: - response = requests.get("%s/files/" % (self.url), - headers=HEADERS, - auth=requests.auth.HTTPBasicAuth(self.username, self.password)) - self.status_code = response.status_code - if response.status_code == requests.codes.ok: - filedata = response.json() - print filedata - self._filedata = filedata - self._files = [File(self, d) for d in filedata['results']] - else: - print "Oops, something went wrong." - print "response=%s" % response.status_code - self._files = [] - return self._files - - def get_table(self, table_id): - response = requests.get("%s/tables/%s/" % (self.url, table_id), - headers=HEADERS, - auth=self.auth) - - if response.status_code == requests.codes.ok: - return Table(self, response.json()) - else: - print "Oops, something went wrong." 
- print response.text - return None - - def create_table(self, name, description=None, columns=None, inputfile=None): - data = { 'name' : name } - if description: - data['description'] = description - if inputfile: - if columns: - print "Please specify either a set of columns or an input file, not both" - return None - - if isinstance(inputfile, File): - data['csvfile'] = inputfile.fullpath - else: - f = self.upload(inputfile) - data['csvfile'] = f.fullpath - elif columns: - data['columns'] = columns - - response = requests.post("%s/tables/" % self.url, - data = json.dumps(data), - headers=HEADERS, - auth=self.auth) - - if response.status_code == requests.codes.ok: - return Table(self, response.json()) - else: - print response.text - return response.text - - def save_df(self, df, name, description=None): - type_map = { 'object' : 'String', - 'float16' : 'Number', - 'float32' : 'Number', - 'float64' : 'Number', - 'int8' : 'Number', - 'int16' : 'Number', - 'int32' : 'Number', - 'int64' : 'Number', - 'unicode' : 'String' } - - if not PANDAS: - print "Install pandas to use DataFrames: http://pandas.pydata.org/" - return None - - schema = { 'name' : name, 'columns' : [] } - if description: - schema['description'] = description - - for i, col in enumerate(df.columns): - dt = df.dtypes[i] - ctype = type_map.get(str(dt), None) - if not ctype: - print "Oops, unrecognized type %s in Data Frame" % dt - return None - - schema['columns'].append({'name' : col, 'type' : ctype }) - - response = requests.post("%s/tables/" % self.url, - data = json.dumps(schema), - headers=HEADERS, - auth=self.auth) - - if response.status_code == requests.codes.ok: - table = Table(self, response.json()) - else: - print response.text - return None - - response = table.create(df.to_dict('records').values()) - if response.status_code != requests.codes.ok: - print "Oops, something went wrong." - print response.text - - return table - - def upload(self, filepath): - filename = filepath.split('/')[-1] - mime = MimeTypes() - mime_type = mime.guess_type(filename) - data = { 'filename' : filename, 'mime_type' : mime_type } - response = requests.post("%s/files/" % self.url, - data = json.dumps(data), - headers=HEADERS, - auth=self.auth) - - if response.status_code == requests.codes.created: - f = File(self, response.json()) - with open(filepath, 'rb') as localfile: - response = requests.put(f.upload_url, - data=localfile) - return f - else: - print response.text - - - diff --git a/quilt/util.py b/quilt/util.py new file mode 100644 index 0000000..0cfc1b4 --- /dev/null +++ b/quilt/util.py @@ -0,0 +1,58 @@ +class File(object): + def __init__(self, connection, data): + self._data = data + self.connection = connection + self.id = data['id'] + self.owner = data['owner'] + self.filename = data['filename'] + self.fullpath = data['fullpath'] + self.url = data['url'] + self.creds = data['s3creds'] + self.upload_url = data['upload_url'] + self.status = data['status'] + + def refresh(self): + response = requests.get("%s/files/%s/" % (self.connection.url, self.id), + headers=HEADERS, + auth=self.connection.auth) + if response.status_code == requests.codes.ok: + self.__init__(self.connection, response.json()) + else: + print "Oops, something went wrong." 
+ print response.status_code + return response + + def download(self): + url = self.url + outfile = self.filename + + r = requests.get(url, stream=True) + with open(outfile, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter out keep-alive new chunks + f.write(chunk) + return outfile + + +class Quilt(object): + def __init__(self, table, data): + self.table = table + self.id = data['sqlname'] + self.left_column = data['left_column'] + self.right_column = data['right_column'] + self.jointype = data['jointype'] + self.right_table = data['right_table_name'] + + def delete(self): + if not self.id: + return requests.codes.not_found + + connection = self.table.connection + response = requests.delete("%s/quilts/%s/" % (connection.url, self.id), + headers=HEADERS, + auth=connection.auth) + if response.status_code == requests.codes.no_content: + self.table._quilts = None + self.id = None + return response.status_code + From 733342aec72fdaeb4c773156cd6a85e408a81ac7 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 14:32:06 -0700 Subject: [PATCH 11/56] Cleanup extra import --- quilt/__init__.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/quilt/__init__.py b/quilt/__init__.py index 423431e..54ff63f 100644 --- a/quilt/__init__.py +++ b/quilt/__init__.py @@ -2,17 +2,3 @@ from .util import File, Quilt from .table import Table, Column from .connection import Connection - -try: - import pandas - PANDAS = True -except: - PANDAS = False - -try: - import psycopg2 - import sqlalchemy - SQLALCHEMY = True -except: - SQLALCHEMY = False - From 5d9baba110b8f7d07d2298fe0682d2f323090b5f Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 14:35:56 -0700 Subject: [PATCH 12/56] Updating release tag in setup --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4444eb0..7764c6c 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): setup( name='quilt', packages=['quilt'], - version='0.1.0', + version='0.1.2', description='Quilt Python API https://quiltdata.com', long_description=readme(), classifiers=[ From b31bff0cf363b33d07a82f0755f57536967c89a6 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 21:14:31 -0700 Subject: [PATCH 13/56] Save large DF in chunks Also adds date time type in type_map --- quilt/connection.py | 15 ++++++++++----- quilt/table.py | 9 +++++++++ 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/quilt/connection.py b/quilt/connection.py index 5eb9f28..05be4b3 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -150,7 +150,8 @@ def save_df(self, df, name, description=None): 'int16' : 'Number', 'int32' : 'Number', 'int64' : 'Number', - 'unicode' : 'String' } + 'unicode' : 'String', + 'datetime64[ns, UTC]' : 'DateTime' } if not PANDAS: print "Install pandas to use DataFrames: http://pandas.pydata.org/" @@ -180,10 +181,14 @@ def save_df(self, df, name, description=None): print response.text return None - response = table.create(df.to_dict('records').values()) - if response.status_code != requests.codes.ok: - print "Oops, something went wrong." - print response.text + chunksz = 500 + nrows = len(df.index) + for start in range(0, nrows, chunksz): + end = start + chunksz + response = table.create_json(df[start:end].to_json(orient='records')) + if response.status_code != requests.codes.ok: + print "Oops, something went wrong." 
+ print response.text return table diff --git a/quilt/table.py b/quilt/table.py index a6a249f..a459e4a 100644 --- a/quilt/table.py +++ b/quilt/table.py @@ -338,6 +338,15 @@ def create(self, data): return response + def create_json(self, jsondata): + response = requests.post("%s/data/%s/rows/" % (self.connection.url, self.id), + data = jsondata, + headers=HEADERS, + auth=self.connection.auth) + + return response + + def create_async(self, data, callback=None): """ Use an asynchronous POST request with the process pool. From f6c5767df5df240b6ed3b1a06944c3bcb0dfd929 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 21:15:53 -0700 Subject: [PATCH 14/56] Update release version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7764c6c..35f572f 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): setup( name='quilt', packages=['quilt'], - version='0.1.2', + version='0.1.3', description='Quilt Python API https://quiltdata.com', long_description=readme(), classifiers=[ From 4a99ca52fc207a1d024785878188ab074b0db7cc Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 22:39:59 -0700 Subject: [PATCH 15/56] Add pip install instructions to README --- .gitignore | 2 ++ README.rst | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/.gitignore b/.gitignore index c678a5e..ffd0589 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] +dist/ +quilt.egg-info/ diff --git a/README.rst b/README.rst index 25d5974..61d9098 100644 --- a/README.rst +++ b/README.rst @@ -255,6 +255,12 @@ The Quilt Python connector uses the Quilt REST API and SQL Alchemy sets in Quilt. Quilt tables are available as dictionaries or Pandas (http://pandas.pydata.org/) DataFrames. +The Quilt Python connector is available via PyPI: +https://pypi.python.org/pypi/quilt + +.. code:: shell + pip install quilt + Connection ---------- From 490413da1d3dc7d8444d9b848dfb673ea430bc66 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 22:42:43 -0700 Subject: [PATCH 16/56] Update README --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 61d9098..e327879 100644 --- a/README.rst +++ b/README.rst @@ -258,7 +258,7 @@ sets in Quilt. Quilt tables are available as dictionaries or Pandas The Quilt Python connector is available via PyPI: https://pypi.python.org/pypi/quilt -.. code:: shell +.. code:: python pip install quilt Connection From 2b57063d84532ecd4452f42d66156cf636c260c5 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 25 Aug 2016 22:43:30 -0700 Subject: [PATCH 17/56] Another README update --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index e327879..bd9052a 100644 --- a/README.rst +++ b/README.rst @@ -259,8 +259,10 @@ The Quilt Python connector is available via PyPI: https://pypi.python.org/pypi/quilt .. code:: python + pip install quilt + Connection ---------- From ebace904afbaccdf0b795459a7e50ce044480265 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 30 Aug 2016 15:19:58 -0700 Subject: [PATCH 18/56] Bug fixes Removing stale intersect (from bad merge) and cleaning up gene math. Also, added a missing import of util. 
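
After the cleanup, a successful gene-math call resolves to a new Table
built from the returned table id rather than a raw HTTP response (a
sketch; tableA and tableB are hypothetical tables whose bed columns
are already set):

    result = tableA.intersect(tableB)
    rows = [row for row in result]  # iterate the result like any Table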
--- quilt/connection.py | 1 + quilt/table.py | 49 +++++++++++++++------------------------------ 2 files changed, 17 insertions(+), 33 deletions(-) diff --git a/quilt/connection.py b/quilt/connection.py index 05be4b3..7fa7a57 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -8,6 +8,7 @@ from .lib import * from .table import * +from .util import * class Connection(object): diff --git a/quilt/table.py b/quilt/table.py index a459e4a..bc99fae 100644 --- a/quilt/table.py +++ b/quilt/table.py @@ -1,5 +1,6 @@ from lib import * +from .util import * def make_post_request(url, data, auth): response = None @@ -210,10 +211,8 @@ def df(self): else: data = [] index = [] - columns = [c['sqlname'] for c in self.columns] + columns = [c.field for c in self.columns] for i, row in enumerate(self): - if limit and i>limit: - break index.append(row['qrid']) data.append(row) return pd.DataFrame(data, columns=columns, index=index) @@ -247,18 +246,25 @@ def _genemath(self, b, operator): print "Chromosome, start, stop columns not found in table %s." % b.name return - data = { 'left_chr' : a_chr, - 'left_start' : a_start, - 'left_end' : a_end, - 'right_chr' : b_chr, - 'right_start' : b_start, - 'right_end' : b_end, + data = { 'left_chr' : a_chr.id, + 'left_start' : a_start.id, + 'left_end' : a_end.id, + 'right_chr' : b_chr.id, + 'right_start' : b_start.id, + 'right_end' : b_end.id, 'operator' : operator } response = requests.post("%s/genemath/" % self.connection.url, data = json.dumps(data), headers=HEADERS, auth=self.connection.auth) - return response + if response.status_code == requests.codes.ok: + data = response.json() + result_table_id = data.get('table') + result = self.connection.get_table(result_table_id) + return result + else: + print "Oops, something went wrong" + return response def export(self): response = requests.get("%s/data/%s/rows/export" % (self.connection.url, self.id), @@ -398,26 +404,3 @@ def subtract(self, b): def intersect_wao(self, b): return self._genemath(b, 'Intersect_WAO') - def intersect(self, b): - a_chr, a_start, a_end = self.get_bed_cols() - if not (a_chr and a_start and a_end): - print "Chromosome, start, stop columns not found." - return - - b_chr, b_start, b_end = b.get_bed_cols() - if not (b_chr and b_start and b_end): - print "Chromosome, start, stop columns not found in table %s." % b.name - return - - data = { 'left_chr' : a_chr, - 'left_start' : a_start, - 'left_end' : a_end, - 'right_chr' : b_chr, - 'right_start' : b_start, - 'right_end' : b_end, - 'operator' : 'Intersect' } - response = requests.post("%s/genemath/" % self.connection.url, - data = json.dumps(data), - headers=HEADERS, - auth=self.connection.auth) - return response From eb63c24bf52060dded81f8571a1e47b1f9fa94b9 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 30 Aug 2016 17:59:56 -0700 Subject: [PATCH 19/56] Added fast save for data frames Using async create to implement (much) faster save for data frames. 
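
The caller-facing API is unchanged; chunking and the request pool stay
internal to save_df (a sketch, with df a hypothetical pandas DataFrame):

    table = connection.save_df(df, "my data set", description="demo upload")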
---
 quilt/connection.py | 29 ++++++++++++++++++++++++-----
 quilt/table.py      | 10 ++++++++++
 2 files changed, 34 insertions(+), 5 deletions(-)

diff --git a/quilt/connection.py b/quilt/connection.py
index 05be4b3..a201317 100644
--- a/quilt/connection.py
+++ b/quilt/connection.py
@@ -9,6 +9,11 @@
 from .lib import *
 from .table import *
 
+def status_check(response):
+    if response.status_code != requests.codes.ok:
+        print "Warning: server responded with %s" % response.status_code
+
+
 class Connection(object):
 
     def __init__(self, username, url=QUILT_URL):
@@ -181,15 +186,29 @@ def save_df(self, df, name, description=None):
             print response.text
             return None
 
-        chunksz = 500
+        chunksz = 250
+        maxreq = 40
         nrows = len(df.index)
+        res = []
         for start in range(0, nrows, chunksz):
             end = start + chunksz
-            response = table.create_json(df[start:end].to_json(orient='records'))
-            if response.status_code != requests.codes.ok:
-                print "Oops, something went wrong."
-                print response.text
+            while len(res) > maxreq:
+                finished = [(r, b) for r, b in res if r.ready()]
+                res[:] = [(r, b) for r, b in res if not r.ready()]
+                for r, b in finished:
+                    if not r.successful():
+                        print "Retrying:"
+                        print b
+                        res.append((table.create_json_async(b, callback=status_check), b))
+                if len(res) > maxreq:
+                    r, b = res[0]
+                    r.wait()
+
+            buffer = df[start:end].to_json(orient='records')
+            res.append((table.create_json_async(buffer, callback=status_check), buffer))
+
+
         return table
 
     def upload(self, filepath):
diff --git a/quilt/table.py b/quilt/table.py
index a459e4a..9cd3896 100644
--- a/quilt/table.py
+++ b/quilt/table.py
@@ -357,6 +357,16 @@ def create_async(self, data, callback=None):
             callback=callback)
         return res
 
+    def create_json_async(self, jsondata, callback=None):
+        """
+        Use an asynchronous POST request with the process pool.
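+        Returns the multiprocessing AsyncResult from apply_async;
+        callers can pass a callback or wait on the result to observe
+        completion.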
+ """ + url = "%s/data/%s/rows/" % (self.connection.url, self.id) + res = self.connection.get_thread_pool().apply_async(make_post_request, + args=(url, jsondata, self.connection.auth), + callback=callback) + return res + def quilt(self, left_column, right_column): data = {} data['left_table'] = self.id From 43a808e8dba414df7a3a12b233bd91ce832c9f6f Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 30 Aug 2016 18:32:46 -0700 Subject: [PATCH 20/56] Update release number --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 35f572f..852b7db 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): setup( name='quilt', packages=['quilt'], - version='0.1.3', + version='0.1.4', description='Quilt Python API https://quiltdata.com', long_description=readme(), classifiers=[ From ac8eefe06748ae3167aeea32bbe17f51332d8d05 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 31 Aug 2016 14:24:57 -0700 Subject: [PATCH 21/56] Added missing import in util.py --- quilt/util.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/quilt/util.py b/quilt/util.py index 0cfc1b4..df3a1aa 100644 --- a/quilt/util.py +++ b/quilt/util.py @@ -1,3 +1,5 @@ +from .lib import * + class File(object): def __init__(self, connection, data): self._data = data From c997eced53c53842c4240e41bde7b0b674d272d2 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 7 Sep 2016 15:49:41 -0700 Subject: [PATCH 22/56] Find real data in commits response json --- README.rst | 84 +++++++++++++++++++++++++------------------------- quilt/table.py | 3 +- 2 files changed, 44 insertions(+), 43 deletions(-) diff --git a/README.rst b/README.rst index bd9052a..be3f565 100644 --- a/README.rst +++ b/README.rst @@ -1,45 +1,3 @@ -Convert files to live data sets on Quilt -======================================== - -Optional prep (your steps may vary) ------------------------------------ - -#. Get a list of files you want to upload (see ``get-files-to-upload/``) -#. Download the files in the list (see ``curl-all.py``) -#. Unzip downloaded files (if needed) - - .. code:: bash - - cd downloads - gunzip *.gz - - .. rubric:: Upload to Quilt - :name: upload-to-quilt - -#. | Use ``data_set.py`` to create individual data sets (see - ``python data_set.py --help``). - | You will need a Quilt username and password. Or use ``batch.py`` to - create multiple data sets. - - .. code:: bash - - python data_set.py - -e https://quiltdata.com - -u USERNAME - -n "ENCODE data" - -d "#A549 #histone peak data #hg19" - -f downloads/wgEncodeBroadHistoneNhaH3k36me3StdPk.broadPeak - -File formats in this example ----------------------------- - -- `ENCDOE broadPeak format`_ - -Resources ---------- - -- `ENCODE Project`_ - REST API ======== @@ -442,3 +400,45 @@ and subtracted. result = tableA.intersect(tableB) result = tableA.intersect_wao(tableB) result = tableA.subtract(tableB) + +Convert files to live data sets on Quilt +======================================== + +Optional prep (your steps may vary) +----------------------------------- + +#. Get a list of files you want to upload (see ``get-files-to-upload/``) +#. Download the files in the list (see ``curl-all.py``) +#. Unzip downloaded files (if needed) + + .. code:: bash + + cd downloads + gunzip *.gz + + .. rubric:: Upload to Quilt + :name: upload-to-quilt + +#. | Use ``data_set.py`` to create individual data sets (see + ``python data_set.py --help``). + | You will need a Quilt username and password. Or use ``batch.py`` to + create multiple data sets. + + .. 
code:: bash + + python data_set.py + -e https://quiltdata.com + -u USERNAME + -n "ENCODE data" + -d "#A549 #histone peak data #hg19" + -f downloads/wgEncodeBroadHistoneNhaH3k36me3StdPk.broadPeak + +File formats in this example +---------------------------- + +- `ENCDOE broadPeak format`_ + +Resources +--------- + +- `ENCODE Project`_ diff --git a/quilt/table.py b/quilt/table.py index a27fd43..624a22d 100644 --- a/quilt/table.py +++ b/quilt/table.py @@ -295,7 +295,8 @@ def commits(self): headers=HEADERS, auth=self.connection.auth) if response.status_code == requests.codes.ok: - return response.json() + # We'll need to handle paging for large commit histories + return response.json()['results'] else: print "Oops, something went wrong." return response From e60260fc49594ba456947dba4603d9491856e73a Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 7 Sep 2016 16:13:51 -0700 Subject: [PATCH 23/56] Reorder README to put Python API first for PyPI Our PyPI page was showing the out-of-date python upload script first, this will put the Python API up front. --- README.rst | 414 ++++++++++++++++++++++++++--------------------------- 1 file changed, 207 insertions(+), 207 deletions(-) diff --git a/README.rst b/README.rst index be3f565..9018fa1 100644 --- a/README.rst +++ b/README.rst @@ -1,210 +1,3 @@ -REST API -======== - -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Action | Endpoint | Details | -+================================+======================================+===========================================+ -| New table | ``POST /tables/`` | `See below`_ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Delete table | ``DELETE /tables/TABLE_ID/`` | `See below <#delete-table>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Update table meta-data | ``PATCH /tables/TABLE_ID`` | `See below <#update-table-meta-data>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Add column to table | ``POST /tables/TABLE_ID/columns/`` | `See below <#add-column-to-table>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Append row to table | ``POST /data/TABLE_ID/rows/`` | `See below <#append-row>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Get table rows | ``GET /data/TABLE_ID/rows`` | `See below <#get-rows>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Get table row | ``GET /data/TABLE_ID/rows/ROW_ID`` | `See below <#get-row>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Genome intersect or subtract | ``POST /genemath/`` | `See below <#intersect-or-subtract>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ - -Notes - -- For all REST calls, the content-type is ``application/JSON``. -- Description fields automatically linkify URLs and support - ``, , , , `` tags - -Tables ------- - -Create table -~~~~~~~~~~~~ - -``POST /tables/`` - -Data format -^^^^^^^^^^^ - -.. 
code:: javascript - - { - 'name': string, - 'description': text `, , , , ` tags supported; automatic linkification of URLs - 'columns': [ - { - 'name': string, - 'sqlname': optional string, - 'description': optional text, - 'type' : one of 'String', 'Number', 'Image', 'Text' - }, - ... - ] - } - -Returns -^^^^^^^ - -Table data as JSON object, includes ``id`` field with the table’s -identifier. - -Add column to table -~~~~~~~~~~~~~~~~~~~ - -``POST /tables/TABLE_ID/columns/`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'name': string, - 'sqlname': optional string, - 'description': text, - 'type': one of 'String', 'Number', 'Image', or 'Text' - } - -Returns -^^^^^^^ - -Column data as JSON object, includes ``id`` field with the column’s -identifier. - -Delete table -~~~~~~~~~~~~ - -``DELETE /tables/TABLE_ID`` - -Update table meta-data -~~~~~~~~~~~~~~~~~~~~~~ - -``PATCH /tables/TABLE_ID`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'name': string, - 'description': text - } - -Table Data ----------- - -- Use column ``sqlname`` as keys in input data - -Append row -~~~~~~~~~~ - -``POST /data/TABLE_ID/rows/`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - [ - {columnSqlname0: value0, columnSqlname1 : value1, ... }, - ... - ] - -Get rows -~~~~~~~~ - -``GET /data/TABLE_ID/rows`` \* Rows are keyed by the Quilt Row ID field -``qrid`` \* NOTE: Currently limited to the first 500 rows - -Returns -^^^^^^^ - -Row data as JSON object, keyed by column.sqlname. - -Get row -~~~~~~~ - -``GET /data/TABLE_ID/rows/ROW_ID`` - -Returns -^^^^^^^ - -Row data as JSON object, keyed by column.sqlname. - -Quilt tables ------------- - -Join -~~~~ - -``POST /quilts/`` #### Data format - -.. code:: javascript - - { - 'left_table_id': int, - 'right_table_id': int, - 'left_column_id': int, - 'right_column_id': int, - 'jointype': one of 'inner', 'leftOuter', 'firstMatch' - } - -Returns -^^^^^^^ - -Quilt info as JSON object, includes ``sqlname`` field with the quilt’s -identifier. - -Undo join -~~~~~~~~~ - -``DELETE /quilts/QUILT_SQLNAME`` - -Genome Math ------------ - -- Performs a gene math operation on two tables -- Creates a new table with the result. -- Columns are specified by ``column.id``. - -Intersect or subtract -~~~~~~~~~~~~~~~~~~~~~ - -``POST /genemath/`` - -Data Format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'operator': one of 'Intersect' or 'Subtract', - 'left_chr': integer (column id), - 'left_start': integer (column id), - 'left_end': integer (column id), - 'right_chr': integer (column id), - 'right_start': integer (column id), - 'right_end': integer (column id) - } - -Returns -^^^^^^^ - -JSON object representing the result table. 
- Python ====== @@ -442,3 +235,210 @@ Resources --------- - `ENCODE Project`_ + +REST API +======== + ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Action | Endpoint | Details | ++================================+======================================+===========================================+ +| New table | ``POST /tables/`` | `See below`_ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Delete table | ``DELETE /tables/TABLE_ID/`` | `See below <#delete-table>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Update table meta-data | ``PATCH /tables/TABLE_ID`` | `See below <#update-table-meta-data>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Add column to table | ``POST /tables/TABLE_ID/columns/`` | `See below <#add-column-to-table>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Append row to table | ``POST /data/TABLE_ID/rows/`` | `See below <#append-row>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Get table rows | ``GET /data/TABLE_ID/rows`` | `See below <#get-rows>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Get table row | ``GET /data/TABLE_ID/rows/ROW_ID`` | `See below <#get-row>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ +| Genome intersect or subtract | ``POST /genemath/`` | `See below <#intersect-or-subtract>`__ | ++--------------------------------+--------------------------------------+-------------------------------------------+ + +Notes + +- For all REST calls, the content-type is ``application/JSON``. +- Description fields automatically linkify URLs and support + ``, , , , `` tags + +Tables +------ + +Create table +~~~~~~~~~~~~ + +``POST /tables/`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'name': string, + 'description': text `, , , , ` tags supported; automatic linkification of URLs + 'columns': [ + { + 'name': string, + 'sqlname': optional string, + 'description': optional text, + 'type' : one of 'String', 'Number', 'Image', 'Text' + }, + ... + ] + } + +Returns +^^^^^^^ + +Table data as JSON object, includes ``id`` field with the table’s +identifier. + +Add column to table +~~~~~~~~~~~~~~~~~~~ + +``POST /tables/TABLE_ID/columns/`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'name': string, + 'sqlname': optional string, + 'description': text, + 'type': one of 'String', 'Number', 'Image', or 'Text' + } + +Returns +^^^^^^^ + +Column data as JSON object, includes ``id`` field with the column’s +identifier. + +Delete table +~~~~~~~~~~~~ + +``DELETE /tables/TABLE_ID`` + +Update table meta-data +~~~~~~~~~~~~~~~~~~~~~~ + +``PATCH /tables/TABLE_ID`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'name': string, + 'description': text + } + +Table Data +---------- + +- Use column ``sqlname`` as keys in input data + +Append row +~~~~~~~~~~ + +``POST /data/TABLE_ID/rows/`` + +Data format +^^^^^^^^^^^ + +.. code:: javascript + + [ + {columnSqlname0: value0, columnSqlname1 : value1, ... }, + ... 
+ ] + +Get rows +~~~~~~~~ + +``GET /data/TABLE_ID/rows`` \* Rows are keyed by the Quilt Row ID field +``qrid`` \* NOTE: Currently limited to the first 500 rows + +Returns +^^^^^^^ + +Row data as JSON object, keyed by column.sqlname. + +Get row +~~~~~~~ + +``GET /data/TABLE_ID/rows/ROW_ID`` + +Returns +^^^^^^^ + +Row data as JSON object, keyed by column.sqlname. + +Quilt tables +------------ + +Join +~~~~ + +``POST /quilts/`` #### Data format + +.. code:: javascript + + { + 'left_table_id': int, + 'right_table_id': int, + 'left_column_id': int, + 'right_column_id': int, + 'jointype': one of 'inner', 'leftOuter', 'firstMatch' + } + +Returns +^^^^^^^ + +Quilt info as JSON object, includes ``sqlname`` field with the quilt’s +identifier. + +Undo join +~~~~~~~~~ + +``DELETE /quilts/QUILT_SQLNAME`` + +Genome Math +----------- + +- Performs a gene math operation on two tables +- Creates a new table with the result. +- Columns are specified by ``column.id``. + +Intersect or subtract +~~~~~~~~~~~~~~~~~~~~~ + +``POST /genemath/`` + +Data Format +^^^^^^^^^^^ + +.. code:: javascript + + { + 'operator': one of 'Intersect' or 'Subtract', + 'left_chr': integer (column id), + 'left_start': integer (column id), + 'left_end': integer (column id), + 'right_chr': integer (column id), + 'right_start': integer (column id), + 'right_end': integer (column id) + } + +Returns +^^^^^^^ + +JSON object representing the result table. From aa76c4b3a9ca94b892c8568f1911cce8995f5fa1 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 7 Sep 2016 16:21:55 -0700 Subject: [PATCH 24/56] Update release version --- quilt/table.py | 6 +++++- setup.py | 4 ++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/quilt/table.py b/quilt/table.py index 624a22d..4c9bd79 100644 --- a/quilt/table.py +++ b/quilt/table.py @@ -221,7 +221,11 @@ def __getitem__(self, qrid): response = requests.get("%s/data/%s/rows/%s" % (self.connection.url, self.id, qrid), headers=HEADERS, auth=self.connection.auth) - return response.json() + if response.status_code == requests.codes.ok: + return response.json() + else: + print "Oops, something went wrong." + return response def __delitem__(self, qrid): response = requests.delete("%s/data/%s/rows/%s" % (self.connection.url, self.id, qrid), diff --git a/setup.py b/setup.py index 852b7db..7f2c84a 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): setup( name='quilt', packages=['quilt'], - version='0.1.4', + version='0.1.5', description='Quilt Python API https://quiltdata.com', long_description=readme(), classifiers=[ @@ -28,7 +28,7 @@ def readme(): author_email='founders@quiltdata.io', # same as you regestered license='LICENSE.txt', # you will probably have to add a LICENSE.txt to the repo url='https://github.com/quiltdata/API', - download_url='https://github.com/quiltdata/API/tarball/0.1.0', + download_url='https://github.com/quiltdata/API/tarball/0.1.5', keywords='quiltdata api social shareable data platform', install_requires=[ 'requests', From e3f59cabc657fa677f1665eff658c74edee082ea Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Mon, 12 Sep 2016 09:51:25 -0700 Subject: [PATCH 25/56] Added merge First cut merge is working. We need to test against duplicating branch updates. 
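A sketch of the intended branch workflow (the table id and row values are
made up, and row keys are the columns' sqlnames):

    import quilt

    con = quilt.Connection('USERNAME')
    t = con.get_table(1234)               # get_table now takes an optional branch=

    t.create_branch('experiment')         # reads and writes now target the branch
    t.create([{'col1': 3, 'col2': 'c'}])  # appended to 'experiment', not master

Merging is exposed through the new quilt.Branch helper; Branch.merge(other)
posts the other branch's name to this branch's merge endpoint.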
--- quilt/connection.py | 4 ++-- quilt/table.py | 46 ++++++++++++++++++++++++++++++++++++++------- quilt/util.py | 35 ++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 9 deletions(-) diff --git a/quilt/connection.py b/quilt/connection.py index 13c559b..c627d66 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -107,13 +107,13 @@ def files(self): self._files = [] return self._files - def get_table(self, table_id): + def get_table(self, table_id, branch=None): response = requests.get("%s/tables/%s/" % (self.url, table_id), headers=HEADERS, auth=self.auth) if response.status_code == requests.codes.ok: - return Table(self, response.json()) + return Table(self, response.json(), branch=branch) else: print "Oops, something went wrong." print response.text diff --git a/quilt/table.py b/quilt/table.py index 4c9bd79..342fc35 100644 --- a/quilt/table.py +++ b/quilt/table.py @@ -46,6 +46,7 @@ class Table(object): _chr = None _start = None _end = None + branch = None def __init__(self, con, id, name, sqlname, description, owner, is_public): self.nextlink = None @@ -58,7 +59,7 @@ def __init__(self, con, id, name, sqlname, description, owner, is_public): self.is_public = is_public self._reset_iteration() - def __init__(self, con, data): + def __init__(self, con, data, branch=None): self.connection = con self.id = data.get('id') self.name = data.get('name') @@ -67,6 +68,7 @@ def __init__(self, con, data): self.owner = data.get('owner') self.is_public = data.get('is_public') self._reset_iteration() + self.branch = branch if data.has_key('columns'): self._schema = [Column(self, cdata) for cdata in data.get('columns')] @@ -236,7 +238,10 @@ def __delitem__(self, qrid): def __iter__(self): self._buffer = [] self._generator = rowgen(self._buffer) - self.nextlink = "%s/data/%s/rows/" % (self.connection.url, self.id) + if self.branch: + self.nextlink = "%s/data/%s/branches/%s/rows/" % (self.connection.url, self.id, self.branch) + else: + self.nextlink = "%s/data/%s/rows/" % (self.connection.url, self.id) return self def _genemath(self, b, operator): @@ -312,6 +317,24 @@ def commit(self, message): headers=HEADERS, auth=self.connection.auth) + def create_branch(self, name, parent=None): + data = {'table_id' : self.id, + 'name' : name} + if parent: + data['parent'] = parent + + response = requests.post("%s/data/%s/branches/" % (self.connection.url, self.id), + data = json.dumps(data), + headers=HEADERS, + auth=self.connection.auth) + if response.status_code == requests.codes.ok: + self.__iter__() + self.branch = name + else: + print "Oops, something went wrong." + + return response + def checkout(self, commit): data = {} response = requests.post("%s/data/%s/commits/%s/checkout/" % (self.connection.url, self.id, commit), @@ -321,7 +344,8 @@ def checkout(self, commit): if response.status_code == requests.codes.ok: self.__iter__() else: - print response.text + print "Oops, something went wrong." 
+ return response def _reset_iteration(self): self._buffer = [] @@ -371,10 +395,18 @@ def next(self): raise StopIteration() def create(self, data): - response = requests.post("%s/data/%s/rows/" % (self.connection.url, self.id), - data = json.dumps(data), - headers=HEADERS, - auth=self.connection.auth) + if self.branch: + response = requests.post("%s/data/%s/branches/%s/rows/" % (self.connection.url, + self.id, + self.branch), + data = json.dumps(data), + headers=HEADERS, + auth=self.connection.auth) + else: + response = requests.post("%s/data/%s/rows/" % (self.connection.url, self.id), + data = json.dumps(data), + headers=HEADERS, + auth=self.connection.auth) return response diff --git a/quilt/util.py b/quilt/util.py index df3a1aa..d0922a7 100644 --- a/quilt/util.py +++ b/quilt/util.py @@ -58,3 +58,38 @@ def delete(self): self.id = None return response.status_code + +class Branch(object): + def __init__(self, table, data): + self.table = table + self.__id = data['id'] + assert self.table.id == data['table'] + self.name = data['name'] + self.head = data['head'] + + def delete(self): + if not self.__id: + return requests.codes.not_found + + connection = self.table.connection + response = requests.delete("%s/data/%s/branches/%s/" % (connection.url, + self.table.id, + self.name), + headers=HEADERS, + auth=connection.auth) + if response.status_code == requests.codes.no_content: + self.__id = None + self.name = None + return response.status_code + + def merge(self, other): + data = {'name' : other.name} + connection = self.table.connection + response = requests.post("%s/data/%s/branches/%s/merge" % (connection.url, + self.table.id, + self.name), + data=json.dumps(data), + headers=HEADERS, + auth=connection.auth) + return response + From 48e536354d73e3bed12eab467946a4e8cf00e014 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 13 Sep 2016 18:59:14 -0700 Subject: [PATCH 26/56] Bug fixes and updates to match Quilt API changes Adding get_branch to Table for convenience and moving ODBC credentials from profile (public) to user (private). Updated API paths to reflect commits moving under branches. --- quilt/__init__.py | 2 +- quilt/connection.py | 2 +- quilt/table.py | 25 +++++++++++++++++++++---- quilt/util.py | 2 +- 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/quilt/__init__.py b/quilt/__init__.py index 54ff63f..dd48de6 100644 --- a/quilt/__init__.py +++ b/quilt/__init__.py @@ -1,4 +1,4 @@ -from .util import File, Quilt +from .util import File, Quilt, Branch from .table import Table, Column from .connection import Connection diff --git a/quilt/connection.py b/quilt/connection.py index c627d66..083afbc 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -38,7 +38,7 @@ def __init__(self, username, url=QUILT_URL): self.userid = userdata['id'] self.profile = userdata['profile'] if SQLALCHEMY: - self._sqlengine = sa.create_engine(self.profile.get('odbc').get('url')) + self._sqlengine = sa.create_engine(userdata.get('odbc').get('url')) else: print "Login Failed. Please check your credentials and try again." 
diff --git a/quilt/table.py b/quilt/table.py
index 342fc35..57776cc 100644
--- a/quilt/table.py
+++ b/quilt/table.py
@@ -300,7 +300,8 @@ def limit(self, limit):
 
     @property
     def commits(self):
-        response = requests.get("%s/data/%s/commits/" % (self.connection.url, self.id),
+        branch = self.branch if self.branch else 'master'
+        response = requests.get("%s/data/%s/branches/%s/commits/" % (self.connection.url, self.id, branch),
                                 headers=HEADERS,
                                 auth=self.connection.auth)
         if response.status_code == requests.codes.ok:
@@ -311,8 +312,9 @@ def commit(self, message):
+        branch = self.branch if self.branch else 'master'
         data = {'message' : message}
-        response = requests.post("%s/data/%s/commits/" % (self.connection.url, self.id),
+        response = requests.post("%s/data/%s/branches/%s/commits/" % (self.connection.url, self.id, branch),
                                  data = json.dumps(data),
                                  headers=HEADERS,
                                  auth=self.connection.auth)
@@ -329,15 +331,30 @@ def create_branch(self, name, parent=None):
                                  auth=self.connection.auth)
         if response.status_code == requests.codes.ok:
             self.__iter__()
-            self.branch = name
+            branch = Branch(self, response.json())
+            self.branch = branch.name
         else:
             print "Oops, something went wrong."
 
         return response
 
+    def get_branch(self, name):
+        response = requests.get("%s/data/%s/branches/%s" % (self.connection.url, self.id, name),
+                                headers=HEADERS,
+                                auth=self.connection.auth)
+        if response.status_code == requests.codes.ok:
+            return Branch(self, response.json())
+        else:
+            print "Oops, something went wrong."
+            return response
+
     def checkout(self, commit):
+        branch = self.branch if self.branch else 'master'
         data = {}
-        response = requests.post("%s/data/%s/commits/%s/checkout/" % (self.connection.url, self.id, commit),
+        response = requests.post("%s/data/%s/branches/%s/commits/%s/checkout/" % (self.connection.url,
+                                                                                  self.id,
+                                                                                  branch,
+                                                                                  commit),
                                  data = json.dumps(data),
                                  headers=HEADERS,
                                  auth=self.connection.auth)
diff --git a/quilt/util.py b/quilt/util.py
index d0922a7..cc89677 100644
--- a/quilt/util.py
+++ b/quilt/util.py
@@ -85,7 +85,7 @@ def delete(self):
     def merge(self, other):
         data = {'name' : other.name}
         connection = self.table.connection
-        response = requests.post("%s/data/%s/branches/%s/merge" % (connection.url,
+        response = requests.post("%s/data/%s/branches/%s/merge/" % (connection.url,
                                                                    self.table.id,
                                                                    self.name),
                                  data=json.dumps(data),
                                  headers=HEADERS,
                                  auth=connection.auth)
         return response

From b18186a265e8f970822489fe9d9b9b4d959ded4d Mon Sep 17 00:00:00 2001
From: Kevin Moore
Date: Wed, 14 Sep 2016 17:53:58 -0700
Subject: [PATCH 27/56] Bugfix: set col sqlnames in save_df
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

It's important to set the sqlname/field for columns in save_df so that
a subsequent df call will return an identical df.
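Continuing the earlier data-frame sketch (hypothetical names; the assert
illustrates the intent, assuming the server keeps the requested sqlnames):

    df = pd.DataFrame({'col1': [1, 2], 'col2': ['a', 'b']})
    t = con.save_df(df, name='roundtrip demo')

    # columns are now created with sqlname == name, so the table's fields
    # line up with the original frame's columns
    assert [c.field for c in t.columns] == list(df.columns)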
--- quilt/connection.py | 6 +++--- quilt/table.py | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/quilt/connection.py b/quilt/connection.py index 083afbc..5f9725f 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -174,7 +174,7 @@ def save_df(self, df, name, description=None): print "Oops, unrecognized type %s in Data Frame" % dt return None - schema['columns'].append({'name' : col, 'type' : ctype }) + schema['columns'].append({'name' : col, 'sqlname' : col, 'type' : ctype }) response = requests.post("%s/tables/" % self.url, data = json.dumps(schema), @@ -191,6 +191,7 @@ def save_df(self, df, name, description=None): maxreq = 40 nrows = len(df.index) res = [] + for start in range(0, nrows, chunksz): end = start + chunksz @@ -208,8 +209,7 @@ def save_df(self, df, name, description=None): buffer = df[start:end].to_json(orient='records') res.append((table.create_json_async(buffer, callback=status_check), buffer)) - - + return table def upload(self, filepath): diff --git a/quilt/table.py b/quilt/table.py index 57776cc..0905acc 100644 --- a/quilt/table.py +++ b/quilt/table.py @@ -193,7 +193,8 @@ def df(self): 'Image' : sa.String } columns = [sa.Column(c.field, type_map[c.type]) for c in self.columns] - table = sa.Table(self.sqlname, sa.MetaData(),*columns) + viewname = "%s_%s" % (self.sqlname, self.branch) if self.branch else self.sqlname + table = sa.Table(viewname, sa.MetaData(),*columns) stmt = sa.select([table]) if self._ordering_fields: From df8b60f6793f2be7754efb496d10e5e2fa5278c3 Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 00:55:56 -0700 Subject: [PATCH 28/56] fix odbc url lookup --- quilt/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/quilt/connection.py b/quilt/connection.py index 083afbc..b3fd643 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -38,7 +38,7 @@ def __init__(self, username, url=QUILT_URL): self.userid = userdata['id'] self.profile = userdata['profile'] if SQLALCHEMY: - self._sqlengine = sa.create_engine(userdata.get('odbc').get('url')) + self._sqlengine = sa.create_engine(userdata['profile']['odbc']['url']) else: print "Login Failed. Please check your credentials and try again." From eee7a491cbc2c4e3e07fb51dfb0deeef3b490148 Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 08:32:36 -0700 Subject: [PATCH 29/56] add tests --- README.rst | 11 +++++++++++ quilt/connection.py | 33 +++++++++++++++++---------------- requirements.txt | 8 ++++++++ tests/__init__.py | 0 tests/test_connection.py | 23 +++++++++++++++++++++++ 5 files changed, 59 insertions(+), 16 deletions(-) create mode 100644 requirements.txt create mode 100644 tests/__init__.py create mode 100644 tests/test_connection.py diff --git a/README.rst b/README.rst index 9018fa1..514a029 100644 --- a/README.rst +++ b/README.rst @@ -442,3 +442,14 @@ Returns ^^^^^^^ JSON object representing the result table. + +Development +----------- + +Python 2.7 tests in-progress. Tests run with: + +.. 
code:: python + + pip install -r requirements.text + pip install pytest + pytest tests diff --git a/quilt/connection.py b/quilt/connection.py index b3fd643..d7698aa 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -1,7 +1,8 @@ import json -import getpass +from getpass import getpass import requests import sys +import os from mimetypes import MimeTypes from multiprocessing import Pool @@ -16,11 +17,11 @@ def status_check(response): class Connection(object): - + def __init__(self, username, url=QUILT_URL): self.url = url self.username = username - self.password = getpass.getpass() + self.password = os.environ.get('QUILT_PASSWORD') or getpass() self.auth = requests.auth.HTTPBasicAuth(self.username, self.password) self.status_code = None self.userid = None @@ -71,7 +72,7 @@ def search(self, search): print "Oops, something went wrong." print "response=%s" % response.status_code - return matches + return matches @property def tables(self): @@ -110,8 +111,8 @@ def files(self): def get_table(self, table_id, branch=None): response = requests.get("%s/tables/%s/" % (self.url, table_id), headers=HEADERS, - auth=self.auth) - + auth=self.auth) + if response.status_code == requests.codes.ok: return Table(self, response.json(), branch=branch) else: @@ -127,7 +128,7 @@ def create_table(self, name, description=None, columns=None, inputfile=None): if columns: print "Please specify either a set of columns or an input file, not both" return None - + if isinstance(inputfile, File): data['csvfile'] = inputfile.fullpath else: @@ -135,7 +136,7 @@ def create_table(self, name, description=None, columns=None, inputfile=None): data['csvfile'] = f.fullpath elif columns: data['columns'] = columns - + response = requests.post("%s/tables/" % self.url, data = json.dumps(data), headers=HEADERS, @@ -158,7 +159,7 @@ def save_df(self, df, name, description=None): 'int64' : 'Number', 'unicode' : 'String', 'datetime64[ns, UTC]' : 'DateTime' } - + if not PANDAS: print "Install pandas to use DataFrames: http://pandas.pydata.org/" return None @@ -166,15 +167,15 @@ def save_df(self, df, name, description=None): schema = { 'name' : name, 'columns' : [] } if description: schema['description'] = description - + for i, col in enumerate(df.columns): dt = df.dtypes[i] ctype = type_map.get(str(dt), None) if not ctype: print "Oops, unrecognized type %s in Data Frame" % dt return None - - schema['columns'].append({'name' : col, 'type' : ctype }) + + schema['columns'].append({'name' : col, 'type' : ctype }) response = requests.post("%s/tables/" % self.url, data = json.dumps(schema), @@ -201,15 +202,15 @@ def save_df(self, df, name, description=None): if not r.successful(): print "Retrying:" print b - res.append((t.create_async(b, status_check), b)) + res.append((t.create_async(b, status_check), b)) if len(res) > maxreq: r, b = res[0] r.wait() - + buffer = df[start:end].to_json(orient='records') res.append((table.create_json_async(buffer, callback=status_check), buffer)) - - + + return table def upload(self, filepath): diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..b7aa744 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +numpy==1.11.1 +pandas==0.18.1 +psycopg2==2.6.2 +python-dateutil==2.5.3 +pytz==2016.6.1 +requests==2.11.1 +six==1.10.0 +SQLAlchemy==1.0.15 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_connection.py b/tests/test_connection.py new file mode 100644 index 0000000..dcba9c5 --- /dev/null +++ 
b/tests/test_connection.py @@ -0,0 +1,23 @@ +import os +import pytest +from quilt import Connection + + +def test_connection_login_ok(): + """ Test a good login. Before running test, in terminal run: + $ export QUILT_USERNAME="valid-username" + $ export QUILT_PASSWORD="valid-password" """ + username = os.environ.get('QUILT_USERNAME') + password = os.environ.get('QUILT_PASSWORD') + conn = Connection(username) + assert conn.username == username + assert conn.password == password + assert conn.status_code == 200 + + +def test_connection_login_bad(): + """ Test a bad login. """ + with pytest.raises(TypeError): + conn = Connection() + conn = Connection('0') + assert conn.status_code == 403 From da2635f018ecf1b891708e87292adf7f06028ec3 Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Thu, 15 Sep 2016 13:30:36 -0700 Subject: [PATCH 30/56] fix broken links --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 514a029..17b79c8 100644 --- a/README.rst +++ b/README.rst @@ -229,12 +229,12 @@ Optional prep (your steps may vary) File formats in this example ---------------------------- -- `ENCDOE broadPeak format`_ +- `ENCODE broadPeak format `_ Resources --------- -- `ENCODE Project`_ +- `ENCODE Project `_ REST API ======== From 5bc748ad1ba34f1339ba29ce631512894c5ed83f Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Thu, 15 Sep 2016 13:34:14 -0700 Subject: [PATCH 31/56] strip REST doc; now in quiltdata/rest-api repository --- README.rst | 206 ----------------------------------------------------- 1 file changed, 206 deletions(-) diff --git a/README.rst b/README.rst index 17b79c8..cce015f 100644 --- a/README.rst +++ b/README.rst @@ -236,212 +236,6 @@ Resources - `ENCODE Project `_ -REST API -======== - -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Action | Endpoint | Details | -+================================+======================================+===========================================+ -| New table | ``POST /tables/`` | `See below`_ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Delete table | ``DELETE /tables/TABLE_ID/`` | `See below <#delete-table>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Update table meta-data | ``PATCH /tables/TABLE_ID`` | `See below <#update-table-meta-data>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Add column to table | ``POST /tables/TABLE_ID/columns/`` | `See below <#add-column-to-table>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Append row to table | ``POST /data/TABLE_ID/rows/`` | `See below <#append-row>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Get table rows | ``GET /data/TABLE_ID/rows`` | `See below <#get-rows>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Get table row | ``GET /data/TABLE_ID/rows/ROW_ID`` | `See below <#get-row>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ -| Genome intersect or subtract | ``POST /genemath/`` | `See below 
<#intersect-or-subtract>`__ | -+--------------------------------+--------------------------------------+-------------------------------------------+ - -Notes - -- For all REST calls, the content-type is ``application/JSON``. -- Description fields automatically linkify URLs and support - ``, , , , `` tags - -Tables ------- - -Create table -~~~~~~~~~~~~ - -``POST /tables/`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'name': string, - 'description': text `, , , , ` tags supported; automatic linkification of URLs - 'columns': [ - { - 'name': string, - 'sqlname': optional string, - 'description': optional text, - 'type' : one of 'String', 'Number', 'Image', 'Text' - }, - ... - ] - } - -Returns -^^^^^^^ - -Table data as JSON object, includes ``id`` field with the table’s -identifier. - -Add column to table -~~~~~~~~~~~~~~~~~~~ - -``POST /tables/TABLE_ID/columns/`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'name': string, - 'sqlname': optional string, - 'description': text, - 'type': one of 'String', 'Number', 'Image', or 'Text' - } - -Returns -^^^^^^^ - -Column data as JSON object, includes ``id`` field with the column’s -identifier. - -Delete table -~~~~~~~~~~~~ - -``DELETE /tables/TABLE_ID`` - -Update table meta-data -~~~~~~~~~~~~~~~~~~~~~~ - -``PATCH /tables/TABLE_ID`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'name': string, - 'description': text - } - -Table Data ----------- - -- Use column ``sqlname`` as keys in input data - -Append row -~~~~~~~~~~ - -``POST /data/TABLE_ID/rows/`` - -Data format -^^^^^^^^^^^ - -.. code:: javascript - - [ - {columnSqlname0: value0, columnSqlname1 : value1, ... }, - ... - ] - -Get rows -~~~~~~~~ - -``GET /data/TABLE_ID/rows`` \* Rows are keyed by the Quilt Row ID field -``qrid`` \* NOTE: Currently limited to the first 500 rows - -Returns -^^^^^^^ - -Row data as JSON object, keyed by column.sqlname. - -Get row -~~~~~~~ - -``GET /data/TABLE_ID/rows/ROW_ID`` - -Returns -^^^^^^^ - -Row data as JSON object, keyed by column.sqlname. - -Quilt tables ------------- - -Join -~~~~ - -``POST /quilts/`` #### Data format - -.. code:: javascript - - { - 'left_table_id': int, - 'right_table_id': int, - 'left_column_id': int, - 'right_column_id': int, - 'jointype': one of 'inner', 'leftOuter', 'firstMatch' - } - -Returns -^^^^^^^ - -Quilt info as JSON object, includes ``sqlname`` field with the quilt’s -identifier. - -Undo join -~~~~~~~~~ - -``DELETE /quilts/QUILT_SQLNAME`` - -Genome Math ------------ - -- Performs a gene math operation on two tables -- Creates a new table with the result. -- Columns are specified by ``column.id``. - -Intersect or subtract -~~~~~~~~~~~~~~~~~~~~~ - -``POST /genemath/`` - -Data Format -^^^^^^^^^^^ - -.. code:: javascript - - { - 'operator': one of 'Intersect' or 'Subtract', - 'left_chr': integer (column id), - 'left_start': integer (column id), - 'left_end': integer (column id), - 'right_chr': integer (column id), - 'right_start': integer (column id), - 'right_end': integer (column id) - } - -Returns -^^^^^^^ - -JSON object representing the result table. 
Development ----------- From 8cf0e8679ef59ac399e5a41cc5e6ab5ab9ca6ed5 Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Thu, 15 Sep 2016 13:40:54 -0700 Subject: [PATCH 32/56] clean up instructional text --- README.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/README.rst b/README.rst index cce015f..b2637b6 100644 --- a/README.rst +++ b/README.rst @@ -209,10 +209,7 @@ Optional prep (your steps may vary) cd downloads gunzip *.gz - .. rubric:: Upload to Quilt - :name: upload-to-quilt - -#. | Use ``data_set.py`` to create individual data sets (see +#. | Use ``data_set.py`` to create data sets on Quilt (see ``python data_set.py --help``). | You will need a Quilt username and password. Or use ``batch.py`` to create multiple data sets. From 62045cb99afae95c6a7141fce059ee2b8fb6e700 Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Thu, 15 Sep 2016 13:56:11 -0700 Subject: [PATCH 33/56] attempt to use PUBLIC flag; needs testing --- batch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/batch.py b/batch.py index dc196eb..d210aa8 100644 --- a/batch.py +++ b/batch.py @@ -36,7 +36,7 @@ def process(argv): name = 'ENCODE ChIP-seq: ' + ' '.join(pretags) tags = map(lambda x: '#' + x, CORE_TAGS + pretags) description = ' '.join(tags) + '\n' + CORE_DESC - args = "-u USERNAME -n '%s' -d '%s' -f downloads/%s -p True -x '%s'" % (name, description, l, passwd) + args = "-u USERNAME -n '%s' -d '%s' -f downloads/%s -p '%s' -x '%s'" % (name, description, l, PUBLIC, passwd) argv = shlex.split(args) #create data set on Quilt dataset.create(argv) From 776251206a8c1d4a057e29749bf903da82d44e11 Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 21:26:49 -0700 Subject: [PATCH 34/56] add travis --- .travis.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..9b3cd96 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,15 @@ +language: python +python: +- '2.7' + +env: + global: + - secure: CUtSldmJk7y2PpimxpOWKNi30Ve/pQuzSrGxQyvBE3ny8nTFjzIK+DUcqCVgNDmCqm3AAkE/DsMyCgRCxTpXGW35mhEyCCUlqPlULgGBoBdx5KtMbgOHq+W/+R1utrmliS5N6GexcydAMQ/CaepKdpeyf0fgpLFes6J1WYG/2JGErHS5Vvbprv9E8UMuG7WoSJXtvSK6Cnz06wWVfGkvw5nLbftuCTfKgCzXU5CK6lFUQlDqJwC6bqsuuGCiWI34OnDmbThHZkNG7JfEapIdeWMoHR93LodviShuiGxhpS/WDewGEwzHXmys6oo6Z9/kLJdcJOCmxdFvYWQUGNxDwOSSFpOjKNXIUR4n6rfqOwarPzRGWQd4C7NLKjkiMymBk1qb016+wlGsicFBktPNV5O9KLL6aytRP34Q30yuAhqcQufVw37G3CdsEKEO6rXghIG81RP2AH64fC12TG29gnQmZQTW1Yv99J61X1+jsCAVVcx1rISO5JYRvMWCBuJK0Arpsjj0PE41/3yUT72TgPRZGeh1wDOvl6SCGNgo5xYoMzdhsoCxm//9rYTW/3HmEUtB59+9M9E9+IZnaT+vN+sWOAElPDEsgu1Kn90GIADfQ/wL8B0pstrVrL2zZSrqCyJxlxE2frq4yNJo74hMizE+QTy2fKumtljLN2x7WZM= + matrix: + - QUILT_SITE=https://quiltdata.com + - QUILT_SITE=https://quilt-heroku.herokuapp.com + +before_install: pip install pytest pytest-cov +install: pip install -r requirements.txt +script: python -m pytest --cov=quilt/ tests +after_success: bash <(curl -s https://codecov.io/bash) From 95baeceb22f8e1a2cd78206738950f5deec5dcaf Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 21:26:53 -0700 Subject: [PATCH 35/56] add travis From ca1ed213e8d79f02d5d01eb65170e396910af04e Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 22:10:54 -0700 Subject: [PATCH 36/56] add tests for staging url --- .travis.yml | 4 ++-- tests/test_connection.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9b3cd96..723f097 100644 --- a/.travis.yml +++ 
b/.travis.yml @@ -6,8 +6,8 @@ env: global: - secure: CUtSldmJk7y2PpimxpOWKNi30Ve/pQuzSrGxQyvBE3ny8nTFjzIK+DUcqCVgNDmCqm3AAkE/DsMyCgRCxTpXGW35mhEyCCUlqPlULgGBoBdx5KtMbgOHq+W/+R1utrmliS5N6GexcydAMQ/CaepKdpeyf0fgpLFes6J1WYG/2JGErHS5Vvbprv9E8UMuG7WoSJXtvSK6Cnz06wWVfGkvw5nLbftuCTfKgCzXU5CK6lFUQlDqJwC6bqsuuGCiWI34OnDmbThHZkNG7JfEapIdeWMoHR93LodviShuiGxhpS/WDewGEwzHXmys6oo6Z9/kLJdcJOCmxdFvYWQUGNxDwOSSFpOjKNXIUR4n6rfqOwarPzRGWQd4C7NLKjkiMymBk1qb016+wlGsicFBktPNV5O9KLL6aytRP34Q30yuAhqcQufVw37G3CdsEKEO6rXghIG81RP2AH64fC12TG29gnQmZQTW1Yv99J61X1+jsCAVVcx1rISO5JYRvMWCBuJK0Arpsjj0PE41/3yUT72TgPRZGeh1wDOvl6SCGNgo5xYoMzdhsoCxm//9rYTW/3HmEUtB59+9M9E9+IZnaT+vN+sWOAElPDEsgu1Kn90GIADfQ/wL8B0pstrVrL2zZSrqCyJxlxE2frq4yNJo74hMizE+QTy2fKumtljLN2x7WZM= matrix: - - QUILT_SITE=https://quiltdata.com - - QUILT_SITE=https://quilt-heroku.herokuapp.com + - QUILT_URL="https://quiltdata.com" + - QUILT_URL="https://quilt-heroku.herokuapp.com" before_install: pip install pytest pytest-cov install: pip install -r requirements.txt diff --git a/tests/test_connection.py b/tests/test_connection.py index dcba9c5..41c2744 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -9,7 +9,8 @@ def test_connection_login_ok(): $ export QUILT_PASSWORD="valid-password" """ username = os.environ.get('QUILT_USERNAME') password = os.environ.get('QUILT_PASSWORD') - conn = Connection(username) + url = os.environ.get('QUILT_URL') + conn = Connection(username=username, url=url) assert conn.username == username assert conn.password == password assert conn.status_code == 200 From 6c8e12856edca4e536e4a1dc866cfa425e128259 Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 22:32:31 -0700 Subject: [PATCH 37/56] add url condition --- quilt/connection.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/quilt/connection.py b/quilt/connection.py index d7698aa..256afe3 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -39,7 +39,10 @@ def __init__(self, username, url=QUILT_URL): self.userid = userdata['id'] self.profile = userdata['profile'] if SQLALCHEMY: - self._sqlengine = sa.create_engine(userdata['profile']['odbc']['url']) + if self.url == 'https://quiltdata.com': + self._sqlengine = sa.create_engine(userdata['profile']['odbc']['url']) + if self.url == 'https://quilt-heroku.herokuapp.com' + self._sqlengine = sa.create_engine(userdata['odbc']['url']) else: print "Login Failed. Please check your credentials and try again." From eb5b61008302b0f9f96963748b3cc2e7e2ad3218 Mon Sep 17 00:00:00 2001 From: brennv Date: Thu, 15 Sep 2016 22:44:36 -0700 Subject: [PATCH 38/56] fix conditional --- quilt/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/quilt/connection.py b/quilt/connection.py index 256afe3..ad56df6 100644 --- a/quilt/connection.py +++ b/quilt/connection.py @@ -41,7 +41,7 @@ def __init__(self, username, url=QUILT_URL): if SQLALCHEMY: if self.url == 'https://quiltdata.com': self._sqlengine = sa.create_engine(userdata['profile']['odbc']['url']) - if self.url == 'https://quilt-heroku.herokuapp.com' + if self.url == 'https://quilt-heroku.herokuapp.com': self._sqlengine = sa.create_engine(userdata['odbc']['url']) else: print "Login Failed. Please check your credentials and try again." 
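With QUILT_USERNAME and QUILT_PASSWORD exported (as the Travis config above
arranges), the client can now log in non-interactively, since the constructor
prefers QUILT_PASSWORD over a getpass prompt. A minimal sketch, mirroring the
test suite:

    import os
    from quilt import Connection

    # assumes both variables are set in the environment
    con = Connection(username=os.environ.get('QUILT_USERNAME'),
                     url=os.environ.get('QUILT_URL') or 'https://quiltdata.com')
    assert con.status_code == 200   # 403 when credentials are rejected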
From 02871ab00cdaf206c4d91970929a419d93e79337 Mon Sep 17 00:00:00 2001 From: brennv Date: Fri, 16 Sep 2016 11:25:47 -0700 Subject: [PATCH 39/56] add test_tables --- tests/test_tables.py | 71 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 tests/test_tables.py diff --git a/tests/test_tables.py b/tests/test_tables.py new file mode 100644 index 0000000..aa2173d --- /dev/null +++ b/tests/test_tables.py @@ -0,0 +1,71 @@ +import os +import pytest +from quilt import Connection +from random import randint + + +username = os.environ.get('QUILT_USERNAME') +url = os.environ.get('QUILT_URL') + + +def test_tables(): + """ Test tables. """ + conn = Connection(username=username, url=url) + results = conn.tables + assert isinstance(results, list) + + +def test_search_tables(): + conn = Connection(username=username, url=url) + results = conn.search('term') + assert isinstance(results, list) + + +def test_get_table_with_bad_ids(): + conn = Connection(username=username, url=url) + # with pytest.raises(TableNotFoundError): + result = conn.get_table(10000000000) + assert isinstance(result, type(None)) # hacky + result = conn.get_table('test string') + assert isinstance(result, type(None)) + + +def test_create_empty_table(): + conn = Connection(username=username, url=url) + t1 = conn.create_table(name='test', description='test empty') + t2 = conn.get_table(t1.id) + assert t1 == t2 + + +def test_create_table_with_duplicate_attributes(): + conn = Connection(username=username, url=url) + t1 = conn.create_table(name='test', description='test empty') + t2 = conn.create_table(name='test', description='test empty') + assert t2.id == t1.id + 1 + assert t1.sqlname != t2.sqlname + + +def test_create_table_from_file(): + seed = str(randint(10000, 99999)) + test_name = 'test' + seed + test_file = test_name + '.csv' + data = 'col1,col2\n1,a\n2,b\n' + with open(test_file, 'w') as f: + f.write(data) + test_description = 'test file ' + test_name + conn = Connection(username=username, url=url) + t1 = conn.create_table(name=test_name, description=test_description, + inputfile=test_file) + assert ['col1', 'col2'] == [col.name for col in t1.columns] + fields = [col.field for col in t1.columns] + rows = [row for row in t1] + # expected_row1 = {fields[0]: 1, fields[1]: 'a', u'qgrid': 1} + # expected_row2 = {fields[0]: 2, fields[1]: 'b', u'qgrid': 2} + # assert rows == [expected_row1, expected_row2] + assert rows[0][fields[0]] == 1.0 # should be integer? + assert rows[0][fields[1]] == 'a' + assert rows[1][fields[0]] == 2.0 + assert rows[1][fields[1]] == 'b' + t2 = conn.get_table(t1.id) + assert t1 == t2 + os.remove(test_file) From 57db8febec2d3eab987eff80e7c3c076eb0fe1db Mon Sep 17 00:00:00 2001 From: brennv Date: Fri, 16 Sep 2016 11:38:14 -0700 Subject: [PATCH 40/56] add fail notes --- tests/test_tables.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/test_tables.py b/tests/test_tables.py index aa2173d..3c666e4 100644 --- a/tests/test_tables.py +++ b/tests/test_tables.py @@ -61,10 +61,12 @@ def test_create_table_from_file(): rows = [row for row in t1] # expected_row1 = {fields[0]: 1, fields[1]: 'a', u'qgrid': 1} # expected_row2 = {fields[0]: 2, fields[1]: 'b', u'qgrid': 2} - # assert rows == [expected_row1, expected_row2] - assert rows[0][fields[0]] == 1.0 # should be integer? 
+ # assert rows == [expected_row1, expected_row2] # this fails + assert rows[0][fields[0]] == 1 + # assert isinstance(rows[0][fields[0]], int) # this fails, it's 1.0 not 1 assert rows[0][fields[1]] == 'a' - assert rows[1][fields[0]] == 2.0 + assert rows[1][fields[0]] == 2 + # assert isinstance(rows[1][fields[0]], int) # this fails, it's 2.0 not 2 assert rows[1][fields[1]] == 'b' t2 = conn.get_table(t1.id) assert t1 == t2 From 17de2563f80715ac79215a57b0e9def818ebe34e Mon Sep 17 00:00:00 2001 From: brennv Date: Fri, 16 Sep 2016 22:55:02 -0700 Subject: [PATCH 41/56] add save_df tests --- tests/test_tables.py | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/tests/test_tables.py b/tests/test_tables.py index 3c666e4..9669023 100644 --- a/tests/test_tables.py +++ b/tests/test_tables.py @@ -1,44 +1,40 @@ import os -import pytest -from quilt import Connection from random import randint +from quilt import Connection +import pandas as pd username = os.environ.get('QUILT_USERNAME') url = os.environ.get('QUILT_URL') +conn = Connection(username=username, url=url) def test_tables(): """ Test tables. """ - conn = Connection(username=username, url=url) results = conn.tables assert isinstance(results, list) def test_search_tables(): - conn = Connection(username=username, url=url) results = conn.search('term') assert isinstance(results, list) def test_get_table_with_bad_ids(): - conn = Connection(username=username, url=url) # with pytest.raises(TableNotFoundError): result = conn.get_table(10000000000) - assert isinstance(result, type(None)) # hacky + assert isinstance(result, type(None)) # hack result = conn.get_table('test string') assert isinstance(result, type(None)) def test_create_empty_table(): - conn = Connection(username=username, url=url) t1 = conn.create_table(name='test', description='test empty') t2 = conn.get_table(t1.id) assert t1 == t2 def test_create_table_with_duplicate_attributes(): - conn = Connection(username=username, url=url) t1 = conn.create_table(name='test', description='test empty') t2 = conn.create_table(name='test', description='test empty') assert t2.id == t1.id + 1 @@ -53,21 +49,30 @@ def test_create_table_from_file(): with open(test_file, 'w') as f: f.write(data) test_description = 'test file ' + test_name - conn = Connection(username=username, url=url) t1 = conn.create_table(name=test_name, description=test_description, inputfile=test_file) assert ['col1', 'col2'] == [col.name for col in t1.columns] fields = [col.field for col in t1.columns] rows = [row for row in t1] - # expected_row1 = {fields[0]: 1, fields[1]: 'a', u'qgrid': 1} - # expected_row2 = {fields[0]: 2, fields[1]: 'b', u'qgrid': 2} - # assert rows == [expected_row1, expected_row2] # this fails assert rows[0][fields[0]] == 1 - # assert isinstance(rows[0][fields[0]], int) # this fails, it's 1.0 not 1 assert rows[0][fields[1]] == 'a' assert rows[1][fields[0]] == 2 - # assert isinstance(rows[1][fields[0]], int) # this fails, it's 2.0 not 2 assert rows[1][fields[1]] == 'b' t2 = conn.get_table(t1.id) assert t1 == t2 os.remove(test_file) + + +def test_create_table_from_df(): + cols = ['col1', 'col2'] + data = {1: 'a', 2: 'b'} + df = pd.DataFrame(list(data.iteritems()), columns=cols) + t1 = conn.save_df(df, name='testDataFrame', description="test") + t2 = conn.get_table(t1.id) + assert ['col1', 'col2'] == [col.name for col in t2.columns] + fields = [col.field for col in t2.columns] + rows = [row for row in t2] + assert rows[0][fields[0]] == 1 + assert rows[0][fields[1]] 
== 'a' + assert rows[1][fields[0]] == 2 + assert rows[1][fields[1]] == 'b' From bc34fff81121e3d1414e991c966615c483f33d2c Mon Sep 17 00:00:00 2001 From: brennv Date: Sat, 17 Sep 2016 10:08:18 -0700 Subject: [PATCH 42/56] specify dependency versions --- setup.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/setup.py b/setup.py index 7f2c84a..a5cd2b8 100644 --- a/setup.py +++ b/setup.py @@ -20,22 +20,19 @@ def readme(): 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - # for more options see https://pypi.python.org/pypi?%3Aaction=list_classifiers ], author='quiltdata', - author_email='founders@quiltdata.io', # same as you regestered - license='LICENSE.txt', # you will probably have to add a LICENSE.txt to the repo + author_email='founders@quiltdata.io', + license='LICENSE.txt', url='https://github.com/quiltdata/API', download_url='https://github.com/quiltdata/API/tarball/0.1.5', keywords='quiltdata api social shareable data platform', install_requires=[ - 'requests', - 'numpy', - 'pandas', - 'psycopg2', - 'sqlalchemy', + 'requests==2.11.1', + 'numpy==1.11.1', + 'pandas==0.18.1', + 'psycopg2==2.6.2', + 'sqlalchemy==1.0.15', ], include_package_data=True, zip_safe=False) From e350975a8631dc3bcdf6ba5f7318f8422ca6bfc2 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Sat, 17 Sep 2016 10:41:50 -0700 Subject: [PATCH 43/56] Update release version --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index a5cd2b8..736bc1e 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): setup( name='quilt', packages=['quilt'], - version='0.1.5', + version='0.1.6', description='Quilt Python API https://quiltdata.com', long_description=readme(), classifiers=[ @@ -25,7 +25,7 @@ def readme(): author_email='founders@quiltdata.io', license='LICENSE.txt', url='https://github.com/quiltdata/API', - download_url='https://github.com/quiltdata/API/tarball/0.1.5', + download_url='https://github.com/quiltdata/API/tarball/0.1.6', keywords='quiltdata api social shareable data platform', install_requires=[ 'requests==2.11.1', From 75d757e12ca3743664255b1f8a1deca0e591b081 Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Mon, 19 Sep 2016 11:47:32 -0700 Subject: [PATCH 44/56] wip --- batch.py => ENCODE-example/batch.py | 0 create-dataset.py | 92 +++++++++++++++++++++++++++++ dataset.py | 83 -------------------------- 3 files changed, 92 insertions(+), 83 deletions(-) rename batch.py => ENCODE-example/batch.py (100%) create mode 100644 create-dataset.py delete mode 100644 dataset.py diff --git a/batch.py b/ENCODE-example/batch.py similarity index 100% rename from batch.py rename to ENCODE-example/batch.py diff --git a/create-dataset.py b/create-dataset.py new file mode 100644 index 0000000..d1e0834 --- /dev/null +++ b/create-dataset.py @@ -0,0 +1,92 @@ +#!/usr/bin/python +#python cli utility for creating data sets (from files) on quiltdata.com +import argparse +import getpass +import json +import os +import requests +import sys + +from requests.auth import HTTPBasicAuth + +HEADERS = {'Content-Type': 'application/json', 'Accept': 'application/json'} + + +def create(argv): + parser = argparse.ArgumentParser(description='Create a data set on Quilt.') + parser.add_argument('-e', '--endpoint', default='https://quiltdata.com', help='API endpoint root URL (without terminating "/")') + 
parser.add_argument('-u', '--user', default=None, help='Quilt username', required=True) + parser.add_argument('-n', '--name', default=None, help='Data set name', required=True) + parser.add_argument('-d', '--description', default=None, help='Data set description') + parser.add_argument('-f', '--file', default=None, help='Path to CSV, TXT, XLS, BED, or other supported format') + parser.add_argument('-p', '--public', default=False, help='True for public, False for private. Private is default.') + parser.add_argument('-x', '--password', default=None, help='Password. For script-to-script use only. Not for commandline use, as password would remain in shell history.') + + args = parser.parse_args(argv) + passwd = args.password or getpass.getpass() + + signature = None + if (args.file): + file_name = os.path.basename(args.file) + #get signed S3 URL + response = get_upload_url(file_name, args, passwd) + print response.json() + check_response(response, 'file upload failed') + signature = json.loads(response.json()) + #request header + headers = { + 'Content-Type': 'text/plain', + 'x-amz-acl': signature['x-amz-acl'] + } + destination = signature['signed_request'] + #local file handle + data = open(args.file, 'rb') + #upload file + upload = requests.put(destination, data=data, headers=headers) + check_response(upload, 's3 signing failed') + #end if(args.file) + + #assemble schema (.columns is required, even if empty) + #TODO specify format for columns + #See https://github.com/quiltdata/python-api/issues/41 + file_path = signature['path'] if signature and signature['path'] else None + schema = { + 'csvfile': file_path, + 'name': args.name, + 'description': args.description, + 'columns': [], + 'is_public': args.public + } + #create data set + endpoint = "%s/tables/" % (args.endpoint) + create = requests.post(endpoint, + auth=HTTPBasicAuth(args.user, passwd), + data=json.dumps(schema), + headers=HEADERS) + check_response(create, 'problem creating data set') +#end main + +# get_upload_url fetch signed URL from backend +def get_upload_url(file_name, args, passwd, file_type='text/plain'): + print(file_name) + data = {'fileName': file_name, 'fileType': file_type} + endpoint = "%s/files/" % (args.endpoint) + #use post not get to avoid enumerating all files in response + return requests.post( + endpoint, + auth=HTTPBasicAuth(args.user, passwd), + params=data, + files={'file': file_name}, + headers=HEADERS) + + +def check_response(response, msg): + if not response.ok: + sys.stderr.write('Oops, %s\n' % msg) + detail = json.loads(response.text)['detail'] + sys.stderr.write('%s %s: %s\n' % (response.status_code, response.reason, detail)) + sys.exit() + + +if __name__ == "__main__": + create(sys.argv[1:]) diff --git a/dataset.py b/dataset.py deleted file mode 100644 index 4a419d0..0000000 --- a/dataset.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/python -import argparse -import getpass -import json -import os -import requests -import sys - -from requests.auth import HTTPBasicAuth - -HEADERS = {"Content-Type": "application/json", "Accept": "application/json"} - - -def create(argv): - parser = argparse.ArgumentParser(description='Create a Quilt data set.') - parser.add_argument('-e', '--endpoint', default='https://quiltdata.com', help='API endpoint root URL (without terminating "/")') - parser.add_argument('-u', '--user', default=os.environ['USER'], help='Quilt username') - parser.add_argument('-n', '--name', default='test_data_set', help='Data set name') - parser.add_argument('-d', '--description', default='', 
help='Data set description') - parser.add_argument('-f', '--file', default='', help='Path to CSV, TXT, TSV, BED, or other supported format') - parser.add_argument('-p', '--public', default=False, help='True for public, False for private. Private is default.') - parser.add_argument('-x', '--password', default=None, help='Password. NOT for commandline use, as password would remain in shell history.') - args = parser.parse_args(argv) - - passwd = None - if(args.password == None): - passwd = getpass.getpass() - else: - passwd = args.password - - #get_upload_url fetch signed URL from backend - def get_upload_url(file_name, file_type='text/plain'): - data = {'fileName': file_name, 'fileType': file_type} - endpoint = "%s/s3args/" % (args.endpoint) - return requests.get(endpoint, - auth=HTTPBasicAuth(args.user, passwd), - params=data, - headers=HEADERS) - #end get_upload_url - - - file_name = os.path.basename(args.file) - #get signed S3 URL - response = get_upload_url(file_name) - check_response(response, 'problem signing file') - signature = json.loads(response.json()) - #request header - headers = { - 'Content-Type': 'text/plain', - 'x-amz-acl': signature['x-amz-acl'] - } - destination = signature['signed_request'] - #local file handle - files = {file_name: open(args.file, 'rb')} - data = open(args.file, 'rb') - #upload file - upload = requests.put(destination, data=data, headers=headers) - check_response(upload, 's3 signing error') - #assemble schema (columns is required) - schema = { - 'csvfile': signature['path'], - 'name': args.name, - 'description': args.description, - 'columns': [], - 'is_public': args.public - } - #create data set - endpoint = "%s/tables/" % (args.endpoint) - create = requests.post(endpoint, - auth=HTTPBasicAuth(args.user, passwd), - data=json.dumps(schema), - headers=HEADERS) - check_response(create, 'problem creating data set') -#end main - - -def check_response(response, msg): - if response.status_code != 200: - sys.stderr.write('%s: %s\n\t%s\n' % (msg, response, response.text)) - - -if __name__ == "__main__": - create(sys.argv[1:]) From db17415f731f5870c865e715b5389b4240441f16 Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Mon, 19 Sep 2016 11:48:44 -0700 Subject: [PATCH 45/56] clean repo of stale examples --- create-dataset.py | 92 - examples/babysleep.py | 130 - examples/crispr.py | 61 - examples/louise.py | 48 - examples/nests.py | 126 - get-files-to-upload/README.md | 2 - get-files-to-upload/curl-all.py | 26 - get-files-to-upload/extract-matching-files.py | 27 - .../wgEncodeBroadHistone-All.txt | 2367 ----------------- 9 files changed, 2879 deletions(-) delete mode 100644 create-dataset.py delete mode 100644 examples/babysleep.py delete mode 100644 examples/crispr.py delete mode 100644 examples/louise.py delete mode 100644 examples/nests.py delete mode 100644 get-files-to-upload/README.md delete mode 100644 get-files-to-upload/curl-all.py delete mode 100644 get-files-to-upload/extract-matching-files.py delete mode 100644 get-files-to-upload/wgEncodeBroadHistone-All.txt diff --git a/create-dataset.py b/create-dataset.py deleted file mode 100644 index d1e0834..0000000 --- a/create-dataset.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/python -#python cli utility for creating data sets (from files) on quiltdata.com -import argparse -import getpass -import json -import os -import requests -import sys - -from requests.auth import HTTPBasicAuth - -HEADERS = {'Content-Type': 'application/json', 'Accept': 'application/json'} - - -def create(argv): - parser = 
argparse.ArgumentParser(description='Create a data set on Quilt.') - parser.add_argument('-e', '--endpoint', default='https://quiltdata.com', help='API endpoint root URL (without terminating "/")') - parser.add_argument('-u', '--user', default=None, help='Quilt username', required=True) - parser.add_argument('-n', '--name', default=None, help='Data set name', required=True) - parser.add_argument('-d', '--description', default=None, help='Data set description') - parser.add_argument('-f', '--file', default=None, help='Path to CSV, TXT, XLS, BED, or other supported format') - parser.add_argument('-p', '--public', default=False, help='True for public, False for private. Private is default.') - parser.add_argument('-x', '--password', default=None, help='Password. For script-to-script use only. Not for commandline use, as password would remain in shell history.') - - args = parser.parse_args(argv) - passwd = args.password or getpass.getpass() - - signature = None - if (args.file): - file_name = os.path.basename(args.file) - #get signed S3 URL - response = get_upload_url(file_name, args, passwd) - print response.json() - check_response(response, 'file upload failed') - signature = json.loads(response.json()) - #request header - headers = { - 'Content-Type': 'text/plain', - 'x-amz-acl': signature['x-amz-acl'] - } - destination = signature['signed_request'] - #local file handle - data = open(args.file, 'rb') - #upload file - upload = requests.put(destination, data=data, headers=headers) - check_response(upload, 's3 signing failed') - #end if(args.file) - - #assemble schema (.columns is required, even if empty) - #TODO specify format for columns - #See https://github.com/quiltdata/python-api/issues/41 - file_path = signature['path'] if signature and signature['path'] else None - schema = { - 'csvfile': file_path, - 'name': args.name, - 'description': args.description, - 'columns': [], - 'is_public': args.public - } - #create data set - endpoint = "%s/tables/" % (args.endpoint) - create = requests.post(endpoint, - auth=HTTPBasicAuth(args.user, passwd), - data=json.dumps(schema), - headers=HEADERS) - check_response(create, 'problem creating data set') -#end main - -# get_upload_url fetch signed URL from backend -def get_upload_url(file_name, args, passwd, file_type='text/plain'): - print(file_name) - data = {'fileName': file_name, 'fileType': file_type} - endpoint = "%s/files/" % (args.endpoint) - #use post not get to avoid enumerating all files in response - return requests.post( - endpoint, - auth=HTTPBasicAuth(args.user, passwd), - params=data, - files={'file': file_name}, - headers=HEADERS) - - -def check_response(response, msg): - if not response.ok: - sys.stderr.write('Oops, %s\n' % msg) - detail = json.loads(response.text)['detail'] - sys.stderr.write('%s %s: %s\n' % (response.status_code, response.reason, detail)) - sys.exit() - - -if __name__ == "__main__": - create(sys.argv[1:]) diff --git a/examples/babysleep.py b/examples/babysleep.py deleted file mode 100644 index 5dd9ba6..0000000 --- a/examples/babysleep.py +++ /dev/null @@ -1,130 +0,0 @@ - -import json -import requests -from datetime import datetime - -import quilt - -con = quilt.Connection('kevin') - -def status_check(response): - if response: - print response.status_code - else: - print "None" - -def get_field(dict, key): - value = None - try: - if dict.has_key(key): - value = dict.get(key) - - if isinstance(value, type({})): - if value.has_key("$oid"): - value = value["$oid"] - elif value.has_key("$date"): - value = value["$date"] - 
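[Editor's note: the deleted example scripts all funnel Mongo extended-JSON exports through a `get_field` helper like the one above. A condensed, standalone sketch of that unwrapping step; this is a hypothetical simplification, and the originals also guard the lookup with try/except:]

```python
# Editor's sketch of the get_field pattern in the deleted examples: Mongo
# extended JSON wraps ObjectIds and dates as {"$oid": ...} / {"$date": ...};
# unwrap them so plain values can be posted as row data.
def get_field(doc, key):
    value = doc.get(key)
    if isinstance(value, dict):
        if '$oid' in value:
            value = value['$oid']
        elif '$date' in value:
            value = value['$date']
    return value

row = {'_id': {'$oid': '57a0b1c2d3e4f5a6b7c8d9e0'}, 'notes': 'slept well'}
print(get_field(row, '_id'))    # -> '57a0b1c2d3e4f5a6b7c8d9e0'
print(get_field(row, 'notes'))  # -> 'slept well'
```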
except Exception as error: - print error - traceback.print_exc(file=sys.stdout) - finally: - return value - -#u'_id' -#u'babyId' -#u'breastPumpingSide' -> side -#u'breastFeedingSide' -> side -#u'quantityAmount' -#u'quantityUnits' -#u'quantityAmountLeft' -#u'quantityUnitsLeft' -#u'quantityAmountRight' -#u'quantityUnitsRight' -#u'bottleType' -#u'createdDateTime' -#u'notes' -#u'endDateTime' -#u'solidFood' -#u'diaperType' -#u'startDateTime' -#u'type' -#u'updatedDateTime' -#u'lastUpdatedByUserId' -#u'isDeleted' - - -columns = [] -columns.append({'name' : '_id', 'type' : 'String'}) -columns.append({'name' : 'babyId', 'type' : 'String'}) -columns.append({'name' : 'breastFeedingSide', 'type' : 'String'}) -columns.append({'name' : 'breastPumpingSide', 'type' : 'String'}) -columns.append({'name' : 'quantityAmount', 'type' : 'Number'}) -columns.append({'name' : 'quantityUnits', 'type' : 'String'}) -columns.append({'name' : 'quantityAmountLeft', 'type' : 'Number'}) -columns.append({'name' : 'quantityUnitsLeft', 'type' : 'String'}) -columns.append({'name' : 'quantityAmountRight', 'type' : 'Number'}) -columns.append({'name' : 'quantityUnitsRight', 'type' : 'String'}) -columns.append({'name' : 'imageUrl', 'type' : 'String'}) -columns.append({'name' : 'imageUrlThumb', 'type' : 'String'}) -columns.append({'name' : 'imageIdentifier', 'type' : 'String'}) -columns.append({'name' : 'solidFood', 'type' : 'String'}) -columns.append({'name' : 'type', 'type' : 'String'}) -columns.append({'name' : 'eventName', 'type' : 'String'}) -columns.append({'name' : 'diaperType', 'type' : 'String'}) -columns.append({'name' : 'bottleType', 'type' : 'String'}) -columns.append({'name' : 'notes', 'type' : 'Text'}) -columns.append({'name' : 'startDateTime', 'type' : 'DateTime'}) -columns.append({'name' : 'endDateTime', 'type' : 'DateTime'}) -columns.append({'name' : 'createdDateTime', 'type' : 'DateTime'}) -columns.append({'name' : 'updatedDateTime', 'type' : 'DateTime'}) -columns.append({'name' : 'lastUpdatedByUserId', 'type' : 'String'}) -columns.append({'name' : 'isDeleted', 'type' : 'String'}) -columns.append({'name' : '__v', 'type' : 'String'}) - -t = con.create_table(name='Baby Events', columns=columns) - -fields = {c.name : c for c in t.columns} - -count = 0 -rowcount = 0 -maxreq = 20 -res = [] -i = 0 -with open('/Users/kmoore/Downloads/babyevents.json', 'rb') as file: - buffer = [] - - for line in file: - count += 1 - event=json.loads(line) - - row = {} - for k in event.keys(): - sqlname = fields[k].field - row[sqlname] = get_field(event, k) - buffer.append(row) - if len(buffer) >= 250: - rowcount += len(buffer) - - while len(res) > maxreq: - finished = [(r, b) for r, b in res if r.ready()] - for r, b in finished: - if not r.successful(): - print "Retrying:" - print b - res.append((t.create_async(b, status_check), b)) - - res[:] = [(r, b) for r, b in res if not r.ready()] - if len(res) > maxreq: - r, b = res[0] - r.wait() - - res.append((t.create_async(buffer, status_check), buffer)) - #t.create(buffer) - buffer = [] - t.create(buffer) - rowcount += len(buffer) - buffer = [] - - -print "End of File? 
Line count = %d, Rows count=%s" % (count, rowcount) - diff --git a/examples/crispr.py b/examples/crispr.py deleted file mode 100644 index 9957a5b..0000000 --- a/examples/crispr.py +++ /dev/null @@ -1,61 +0,0 @@ -import requests -from datetime import datetime -import quilt - -con = quilt.Connection('kmoore') - -def status_check(response): - if response: - print response.status_code - else: - print "None" - - - -ut = con.get_table(1924) -lt = con.get_table(1529) - -field_map = {} -field_map['grna_name_000'] = 'grna_name_000' -field_map['gene_001'] = 'grna_targetgene_001' -field_map['grna_sequence_002'] = 'grna_sequence_002' -field_map['oligo_library_003'] = 'oligo_library_009' -field_map['oligo_plate_f_004'] = 'oligo_plate_f_007' -field_map['oligo_plate_r_005'] = 'oligo_plate_r_008' - -rowcount = 0 -maxreq = 20 -buffer = [] -res = [] -for row in lt: - nr = {} - nr['source'] = lt.name - for f, t in field_map.items(): - nr[f] = row[t] - buffer.append(nr) - - if len(buffer) >= 250: - rowcount += len(buffer) - - while len(res) > maxreq: - finished = [(r, b) for r, b in res if r.ready()] - for r, b in finished: - if not r.successful(): - print "Retrying:" - print b - res.append((ut.create_async(b, status_check), b)) - - res[:] = [(r, b) for r, b in res if not r.ready()] - if len(res) > maxreq: - r, b = res[0] - r.wait() - - res.append((ut.create_async(buffer, status_check), buffer)) - #ut.create(buffer) - buffer = [] -ut.create(buffer) -rowcount += len(buffer) -buffer = [] - -print "Inserted = %d" % (rowcount) - diff --git a/examples/louise.py b/examples/louise.py deleted file mode 100644 index 75acded..0000000 --- a/examples/louise.py +++ /dev/null @@ -1,48 +0,0 @@ -import json -import requests -import quilt - -con = quilt.Connection('kmoore') -source_id = 2195 - -columns = [ - {'name' : 'iN_RNAseq ID', - 'sqlname' : 'in_rnaseq_id', - 'description' : 'QRID from iN_RNAseq table', - 'type' : 'Number'}, - {'name' : 'Transcript ID', - 'sqlname' : 'transcipt_id', - 'type' : 'String'} - ] - -t = con.create_table(name="Transcript IDs", - description="Transcript IDs separated from table %s" % source_id, - columns=columns) - -def insert(t, i, buffer): - response = t.create(buffer) - if response.status_code == 200: - print "Processed %d rows, inserting %d" % (i+1, len(buffer)) - else: - print "Warning: Insert failed!" 
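[Editor's note: babysleep.py and crispr.py above share the same upload loop: buffer rows into batches of 250, keep at most 20 asynchronous insert requests in flight, and re-submit any batch whose request failed. A condensed sketch of that pattern, assuming, as the scripts do, a table object whose `create_async(batch)` returns an AsyncResult-style handle with `ready()`, `successful()`, and `wait()`:]

```python
# Editor's sketch of the throttled batching loop used by the deleted examples.
BATCH_SIZE = 250     # rows per insert request
MAX_IN_FLIGHT = 20   # async requests allowed at once

def upload(table, rows):
    pending = []  # list of (async_handle, batch) pairs
    buffer = []
    for row in rows:
        buffer.append(row)
        if len(buffer) < BATCH_SIZE:
            continue
        while len(pending) > MAX_IN_FLIGHT:
            # re-submit finished batches that failed, drop finished successes
            for handle, batch in [p for p in pending if p[0].ready()]:
                if not handle.successful():
                    pending.append((table.create_async(batch), batch))
            pending[:] = [p for p in pending if not p[0].ready()]
            if len(pending) > MAX_IN_FLIGHT:
                pending[0][0].wait()  # block on the oldest in-flight request
        pending.append((table.create_async(buffer), buffer))
        buffer = []
    if buffer:
        table.create(buffer)  # flush the final partial batch synchronously
```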
- print response.text - -src = con.get_table(source_id) -batch = 250 -buffer = [] -for i, row in enumerate(src): - #transcript_id_s_002 - in_rna_id = int(row['qrid']) - t_id_str = row['transcript_id_s_002'] - tids = t_id_str.split(',') - for tid in tids: - tuple = { 'in_rnaseq_id' : in_rna_id, - 'transcipt_id' : tid } - buffer.append(tuple) - - if len(buffer) >= batch: - insert(t, i, buffer) - buffer = [] - -insert(t, i, buffer) - diff --git a/examples/nests.py b/examples/nests.py deleted file mode 100644 index d08bfb9..0000000 --- a/examples/nests.py +++ /dev/null @@ -1,126 +0,0 @@ - -import json -import requests -from datetime import datetime - -import quilt - -con = quilt.Connection('kmoore') - -def status_check(response): - if response: - print response.status_code - else: - print "None" - -def get_field(dict, key): - value = None - try: - if dict.has_key(key): - value = dict.get(key) - - if isinstance(value, type({})): - if value.has_key("$oid"): - value = value["$oid"] - elif value.has_key("$date"): - ut = value["$date"] - - try: - ival = int(ut)/1000 - dt = datetime.fromtimestamp(ival) - value = dt.strftime("%Y-%m-%dT%H:%M:%S.%fZ") - except: - value = None - - if not value: - try: - ival = long(ut)/1000000000 - dt = datetime.fromtimestamp(ival) - value = dt.strftime("%Y-%m-%dT%H:%M:%S.%fZ") - except: - pass - - except Exception as error: - print error - traceback.print_exc(file=sys.stdout) - finally: - return value - -columns = [] -columns.append({'name' : '_id', 'type' : 'String'}) -columns.append({'name' : 'creatorId', 'type' : 'String'}) -columns.append({'name' : 'name', 'type' : 'String'}) -columns.append({'name' : 'members', 'type' : 'Text'}) -columns.append({'name' : 'babies', 'type' : 'Text'}) -columns.append({'name' : 'lastModified', 'type' : 'String'}) -columns.append({'name' : '__v', 'type' : 'String'}) -nests = con.create_table(name='Nests', columns=columns) - -columns = [] -columns.append({'name' : '_id', 'type' : 'String'}) -columns.append({'name' : 'name', 'type' : 'String'}) -columns.append({'name' : 'dob', 'type' : 'DateTime'}) -columns.append({'name' : 'gender', 'type' : 'String'}) -columns.append({'name' : 'bloodType', 'type' : 'String'}) -columns.append({'name' : 'imgUrlSmall', 'type' : 'Text'}) -babies = con.create_table(name='Babies', columns=columns) - -fields = {c.name : c for c in nests.columns} -baby_fields = {c.name : c for c in babies.columns} - -#all_keys = {} -count = 0 -rowcount = 0 -maxreq = 20 -res = [] -i = 0 -with open('/Users/kmoore/Downloads/nests.json', 'rb') as file: - nest_buffer = [] - babies_buffer = [] - - for line in file: - count += 1 - nest=json.loads(line) - - row = {} - for k in nest.keys(): - sqlname = fields[k].field - row[sqlname] = get_field(nest, k) - - nest_babies = nest.get('babies') if nest.has_key('babies') else [] - for b in nest_babies: - baby_row = {} - for k in b.keys(): - sqlname = baby_fields[k].field - baby_row[sqlname] = get_field(b, k) - babies_buffer.append(baby_row) - - nest_buffer.append(row) - if len(nest_buffer) >= 100: - rowcount += len(nest_buffer) - - response = nests.create(nest_buffer) - if response.status_code != 200: - print response.text - - response = babies.create(babies_buffer) - if response.status_code != 200: - for entry in babies_buffer: - response = babies.create(entry) - if response.status_code != 200: - print response.text - print entry - - nest_buffer = [] - babies_buffer = [] - nests.create(nest_buffer) - babies.create(babies_buffer) - - rowcount += len(nest_buffer) - nest_buffer = [] - -#for k, 
kcount in all_keys.items(): -# print "%00d %s" % (kcount, k) - -print "End of File? Line count = %d, Rows count=%s" % (count, rowcount) - diff --git a/get-files-to-upload/README.md b/get-files-to-upload/README.md deleted file mode 100644 index 6cdb0c5..0000000 --- a/get-files-to-upload/README.md +++ /dev/null @@ -1,2 +0,0 @@ -* `wgEncodeBroadHistone-All.txt` derives from `files.txt` in the [ENCODE Broad Histone Repository](http://hgdownload.cse.ucsc.edu/goldenPath/hg19/encodeDCC/wgEncodeBroadHistone/) -* `extract-peak-files.py` filters a `.txt` file down to the lines that match the regular expression `*peak.gz` diff --git a/get-files-to-upload/curl-all.py b/get-files-to-upload/curl-all.py deleted file mode 100644 index 51b3fbb..0000000 --- a/get-files-to-upload/curl-all.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/python -import re -from subprocess import call -import sys - - -def main(argv): - if len(argv) != 4: - sys.stderr.write("Usage: %s \n" % argv[0]) - return 1 - - fname = argv[1] - url = argv[2] - my_dir = './' + argv[3] + '/' - f = open(fname, 'r') - raw = f.read() - - tokens = raw.split() - for t in tokens: - call(['curl', '-o', my_dir + t, url + t]) - - return 1 - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/get-files-to-upload/extract-matching-files.py b/get-files-to-upload/extract-matching-files.py deleted file mode 100644 index b4c0256..0000000 --- a/get-files-to-upload/extract-matching-files.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/path/to/python - -import re -import sys - - -def main(argv): - if len(argv) != 2: - sys.stderr.write("Usage: %s \n" % argv[0]) - return 1 - - fname = argv[1] - f = open(fname, 'r') - raw = f.read() - - peak_files = re.compile('\"(.*peak\.gz)\"', re.IGNORECASE) - tokens = raw.split() - for t in tokens: - m = peak_files.match(t) - if m: - sys.stdout.write("%s\n" % m.group(1)) - - return 1 - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/get-files-to-upload/wgEncodeBroadHistone-All.txt b/get-files-to-upload/wgEncodeBroadHistone-All.txt deleted file mode 100644 index dce2e45..0000000 --- a/get-files-to-upload/wgEncodeBroadHistone-All.txt +++ /dev/null @@ -1,2367 +0,0 @@ -value -"Name" -"Last modified" -"Size" -"Description" -"Parent Directory" -"files.txt" -"md5sum.history" -"md5sum.txt" -"wgEncodeBroadHistoneA549ControlDex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549ControlDex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549ControlDex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549ControlDex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549ControlDex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549ControlDex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549ControlDex100nmSig.bigWig" -"wgEncodeBroadHistoneA549ControlEtoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549ControlEtoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549ControlEtoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549ControlEtoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549ControlEtoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549ControlEtoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549ControlEtoh02Sig.bigWig" -"wgEncodeBroadHistoneA549CtcfDex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549CtcfDex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549CtcfDex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549CtcfDex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549CtcfDex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549CtcfDex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549CtcfDex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549CtcfDex100nmSig.bigWig" 
-"wgEncodeBroadHistoneA549CtcfEtoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549CtcfEtoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549CtcfEtoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549CtcfEtoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549CtcfEtoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549CtcfEtoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549CtcfEtoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549CtcfEtoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H2azDex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H2azDex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H2azDex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H2azDex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H2azDex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H2azDex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H2azDex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H2azDex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H2azEtoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H2azEtoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H2azEtoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H2azEtoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H2azEtoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H2azEtoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H2azEtoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H2azEtoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me1Dex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k04me1Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k04me1Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k04me1Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k04me1Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k04me1Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k04me1Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me1Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me1Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me2Dex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k04me2Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k04me2Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k04me2Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k04me2Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k04me2Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k04me2Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me2Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me2Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me3Dex100nmRawDataRep2.fastq.gz" 
-"wgEncodeBroadHistoneA549H3k04me3Dex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k04me3Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k04me3Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k04me3Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k04me3Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k04me3Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k04me3Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me3Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k04me3Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k09acEtoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k09acEtoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k09acEtoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k09acEtoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k09acEtoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k09acEtoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k09acEtoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k09acEtoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k09me3Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k09me3Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k09me3Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k09me3Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k09me3Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k09me3Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k09me3Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k09me3Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k27acDex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k27acDex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k27acDex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k27acDex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k27acDex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k27acDex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k27acDex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k27acDex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k27acEtoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k27acEtoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k27acEtoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k27acEtoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k27acEtoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k27acEtoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k27acEtoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k27acEtoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k27me3Dex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k27me3Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k27me3Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k27me3Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k27me3Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k27me3Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k27me3Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k27me3Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k27me3Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmPk.broadPeak.gz" 
-"wgEncodeBroadHistoneA549H3k36me3Dex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k36me3Dex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k36me3Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k36me3Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k36me3Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k36me3Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k36me3Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k36me3Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k36me3Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k36me3Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmAlnRep1.bam" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmAlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmAlnRep2.bam" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmAlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmPk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k79me2Dex100nmSig.bigWig" -"wgEncodeBroadHistoneA549H3k79me2Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H3k79me2Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H3k79me2Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H3k79me2Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H3k79me2Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H3k79me2Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H3k79me2Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H3k79me2Etoh02Sig.bigWig" -"wgEncodeBroadHistoneA549H4k20me1Etoh02AlnRep1.bam" -"wgEncodeBroadHistoneA549H4k20me1Etoh02AlnRep1.bam.bai" -"wgEncodeBroadHistoneA549H4k20me1Etoh02AlnRep2.bam" -"wgEncodeBroadHistoneA549H4k20me1Etoh02AlnRep2.bam.bai" -"wgEncodeBroadHistoneA549H4k20me1Etoh02Pk.broadPeak.gz" -"wgEncodeBroadHistoneA549H4k20me1Etoh02RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneA549H4k20me1Etoh02RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneA549H4k20me1Etoh02Sig.bigWig" -"wgEncodeBroadHistoneCd20ControlAlnRep1.bam" -"wgEncodeBroadHistoneCd20ControlAlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20ControlAlnRep2.bam" -"wgEncodeBroadHistoneCd20ControlAlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20ControlRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneCd20ControlRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20ControlSig.bigWig" -"wgEncodeBroadHistoneCd20CtcfAlnRep1.bam" -"wgEncodeBroadHistoneCd20CtcfAlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20CtcfAlnRep2.bam" -"wgEncodeBroadHistoneCd20CtcfAlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20CtcfAlnRep3.bam" -"wgEncodeBroadHistoneCd20CtcfAlnRep3.bam.bai" -"wgEncodeBroadHistoneCd20CtcfPk.broadPeak.gz" -"wgEncodeBroadHistoneCd20CtcfRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneCd20CtcfRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20CtcfRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneCd20CtcfSig.bigWig" -"wgEncodeBroadHistoneCd20H2azAlnRep1.bam" -"wgEncodeBroadHistoneCd20H2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20H2azAlnRep2.bam" -"wgEncodeBroadHistoneCd20H2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20H2azPk.broadPeak.gz" -"wgEncodeBroadHistoneCd20H2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneCd20H2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20H2azSig.bigWig" -"wgEncodeBroadHistoneCd20H3k04me2AlnRep1.bam" -"wgEncodeBroadHistoneCd20H3k04me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20H3k04me2AlnRep2.bam" -"wgEncodeBroadHistoneCd20H3k04me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20H3k04me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneCd20H3k04me2RawDataRep1.fastq.gz" 
-"wgEncodeBroadHistoneCd20H3k04me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20H3k04me2Sig.bigWig" -"wgEncodeBroadHistoneCd20ro01794Ezh239875AlnRep1.bam" -"wgEncodeBroadHistoneCd20ro01794Ezh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20ro01794Ezh239875AlnRep2.bam" -"wgEncodeBroadHistoneCd20ro01794Ezh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20ro01794Ezh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneCd20ro01794Ezh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneCd20ro01794Ezh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20ro01794Ezh239875Sig.bigWig" -"wgEncodeBroadHistoneCd20ro01794H3k27acAlnRep1.bam" -"wgEncodeBroadHistoneCd20ro01794H3k27acAlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20ro01794H3k27acAlnRep2.bam" -"wgEncodeBroadHistoneCd20ro01794H3k27acAlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20ro01794H3k27acPk.broadPeak.gz" -"wgEncodeBroadHistoneCd20ro01794H3k27acRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneCd20ro01794H3k27acRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20ro01794H3k27acSig.bigWig" -"wgEncodeBroadHistoneCd20ro01794H4k20me1AlnRep1.bam" -"wgEncodeBroadHistoneCd20ro01794H4k20me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneCd20ro01794H4k20me1AlnRep2.bam" -"wgEncodeBroadHistoneCd20ro01794H4k20me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneCd20ro01794H4k20me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneCd20ro01794H4k20me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneCd20ro01794H4k20me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneCd20ro01794H4k20me1Sig.bigWig" -"wgEncodeBroadHistoneDnd41ControlStdAlnRep1.bam" -"wgEncodeBroadHistoneDnd41ControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41ControlStdAlnRep2.bam" -"wgEncodeBroadHistoneDnd41ControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41ControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41ControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41ControlStdSig.bigWig" -"wgEncodeBroadHistoneDnd41CtcfAlnRep1.bam" -"wgEncodeBroadHistoneDnd41CtcfAlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41CtcfAlnRep2.bam" -"wgEncodeBroadHistoneDnd41CtcfAlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41CtcfPk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41CtcfRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41CtcfRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41CtcfSig.bigWig" -"wgEncodeBroadHistoneDnd41Ezh239875AlnRep1.bam" -"wgEncodeBroadHistoneDnd41Ezh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41Ezh239875AlnRep2.bam" -"wgEncodeBroadHistoneDnd41Ezh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41Ezh239875AlnRep3.bam" -"wgEncodeBroadHistoneDnd41Ezh239875AlnRep3.bam.bai" -"wgEncodeBroadHistoneDnd41Ezh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41Ezh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41Ezh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41Ezh239875RawDataRep3.fastq.gz" -"wgEncodeBroadHistoneDnd41Ezh239875Sig.bigWig" -"wgEncodeBroadHistoneDnd41H2azAlnRep1.bam" -"wgEncodeBroadHistoneDnd41H2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H2azAlnRep2.bam" -"wgEncodeBroadHistoneDnd41H2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H2azPk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H2azSig.bigWig" -"wgEncodeBroadHistoneDnd41H3k04me1AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k04me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k04me1AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k04me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k04me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k04me1RawDataRep1.fastq.gz" 
-"wgEncodeBroadHistoneDnd41H3k04me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k04me1Sig.bigWig" -"wgEncodeBroadHistoneDnd41H3k04me2AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k04me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k04me2AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k04me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k04me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k04me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k04me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k04me2Sig.bigWig" -"wgEncodeBroadHistoneDnd41H3k04me3AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k04me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k04me3AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k04me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k04me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k04me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k04me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k04me3Sig.bigWig" -"wgEncodeBroadHistoneDnd41H3k09acAlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k09acAlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k09acAlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k09acAlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k09acPk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k09acRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k09acRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k09acSig.bigWig" -"wgEncodeBroadHistoneDnd41H3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k09me3Sig.bigWig" -"wgEncodeBroadHistoneDnd41H3k27acAlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k27acAlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k27acAlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k27acAlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k27acPk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k27acRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k27acRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k27acSig.bigWig" -"wgEncodeBroadHistoneDnd41H3k27me3AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k27me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k27me3AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k27me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k27me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k27me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k27me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k27me3Sig.bigWig" -"wgEncodeBroadHistoneDnd41H3k36me3AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k36me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k36me3AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k36me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k36me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k36me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k36me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k36me3Sig.bigWig" -"wgEncodeBroadHistoneDnd41H3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneDnd41H3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H3k79me2Sig.bigWig" -"wgEncodeBroadHistoneDnd41H4k20me1AlnRep1.bam" -"wgEncodeBroadHistoneDnd41H4k20me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneDnd41H4k20me1AlnRep2.bam" 
-"wgEncodeBroadHistoneDnd41H4k20me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneDnd41H4k20me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneDnd41H4k20me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneDnd41H4k20me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneDnd41H4k20me1Sig.bigWig" -"wgEncodeBroadHistoneGm12878ControlStdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878ControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878ControlStdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878ControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878ControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878ControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878ControlStdSig.bigWig" -"wgEncodeBroadHistoneGm12878CtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878CtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878CtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878CtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878CtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878CtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878CtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878CtcfStdSig.bigWig" -"wgEncodeBroadHistoneGm12878Ezh239875AlnRep1.bam" -"wgEncodeBroadHistoneGm12878Ezh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878Ezh239875AlnRep2.bam" -"wgEncodeBroadHistoneGm12878Ezh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878Ezh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878Ezh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878Ezh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878Ezh239875Sig.bigWig" -"wgEncodeBroadHistoneGm12878H2azStdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H2azStdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H2azStdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k04me1StdAlnRep1V2.bam" -"wgEncodeBroadHistoneGm12878H3k04me1StdAlnRep1V2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k04me1StdPkV2.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k04me1StdRawDataRep1V2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k04me1StdSigV2.bigWig" -"wgEncodeBroadHistoneGm12878H3k04me3StdAlnRep2V2.bam" -"wgEncodeBroadHistoneGm12878H3k04me3StdAlnRep2V2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k04me3StdPkV2.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k04me3StdRawDataRep2V2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k04me3StdSigV2.bigWig" -"wgEncodeBroadHistoneGm12878H3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k4me3StdAlnRep1.bam.bai" 
-"wgEncodeBroadHistoneGm12878H3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k9acStdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k9me3StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k9me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k9me3StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k9me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k9me3StdAlnRep3.bam" -"wgEncodeBroadHistoneGm12878H3k9me3StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneGm12878H3k9me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k9me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k9me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k9me3StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k9me3StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k27acStdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k27me3StdAlnRep3V2.bam" -"wgEncodeBroadHistoneGm12878H3k27me3StdAlnRep3V2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k27me3StdPkV2.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k27me3StdRawDataRep3V2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k27me3StdSigV2.bigWig" -"wgEncodeBroadHistoneGm12878H3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneGm12878H3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneGm12878H4k20me1StdAlnRep1.bam" 
-"wgEncodeBroadHistoneGm12878H4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneGm12878H4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneGm12878H4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneGm12878H4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneGm12878H4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneGm12878H4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneGm12878H4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneH1hescChd1a301218aStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescChd1a301218aStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescChd1a301218aStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescChd1a301218aStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescChd1a301218aStdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescChd1a301218aStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescChd1a301218aStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescChd1a301218aStdSig.bigWig" -"wgEncodeBroadHistoneH1hescChd7a301223a1AlnRep1.bam" -"wgEncodeBroadHistoneH1hescChd7a301223a1AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescChd7a301223a1Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescChd7a301223a1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescChd7a301223a1Sig.bigWig" -"wgEncodeBroadHistoneH1hescControlStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescControlStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescControlStdSig.bigWig" -"wgEncodeBroadHistoneH1hescCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescCtcfStdSig.bigWig" -"wgEncodeBroadHistoneH1hescEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneH1hescEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneH1hescEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescEzh239875Sig.bigWig" -"wgEncodeBroadHistoneH1hescH2azStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH2azStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH2azStdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k09me3StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k09me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k09me3StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k09me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k09me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k09me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k09me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k09me3StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k4me1StdPk.broadPeak.gz" 
-"wgEncodeBroadHistoneH1hescH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescH3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneH1hescH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescH4k20me1StdPk.broadPeak.gz" 
-"wgEncodeBroadHistoneH1hescH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneH1hescHdac2a300705aAlnRep1.bam" -"wgEncodeBroadHistoneH1hescHdac2a300705aAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescHdac2a300705aPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescHdac2a300705aRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescHdac2a300705aSig.bigWig" -"wgEncodeBroadHistoneH1hescHdac6a301341aAlnRep1.bam" -"wgEncodeBroadHistoneH1hescHdac6a301341aAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescHdac6a301341aPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescHdac6a301341aRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescHdac6a301341aSig.bigWig" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescJarid1aab26049StdSig.bigWig" -"wgEncodeBroadHistoneH1hescJmjd2aa300861a1AlnRep1.bam" -"wgEncodeBroadHistoneH1hescJmjd2aa300861a1AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescJmjd2aa300861a1Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescJmjd2aa300861a1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescJmjd2aa300861a1Sig.bigWig" -"wgEncodeBroadHistoneH1hescP300kat3bAlnRep1.bam" -"wgEncodeBroadHistoneH1hescP300kat3bAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescP300kat3bPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescP300kat3bRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescP300kat3bSig.bigWig" -"wgEncodeBroadHistoneH1hescPhf8a301772aAlnRep1.bam" -"wgEncodeBroadHistoneH1hescPhf8a301772aAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescPhf8a301772aPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescPhf8a301772aRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescPhf8a301772aSig.bigWig" -"wgEncodeBroadHistoneH1hescPlu1AlnRep1.bam" -"wgEncodeBroadHistoneH1hescPlu1AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescPlu1Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescPlu1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescPlu1Sig.bigWig" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdAlnRep1.bam" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdAlnRep2.bam" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdPk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneH1hescRbbp5a300109aStdSig.bigWig" -"wgEncodeBroadHistoneH1hescSap3039731AlnRep1.bam" -"wgEncodeBroadHistoneH1hescSap3039731AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescSap3039731Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescSap3039731RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescSap3039731Sig.bigWig" -"wgEncodeBroadHistoneH1hescSirt6AlnRep1.bam" -"wgEncodeBroadHistoneH1hescSirt6AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescSirt6Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescSirt6RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneH1hescSirt6Sig.bigWig" -"wgEncodeBroadHistoneH1hescSuz12051317AlnRep1.bam" -"wgEncodeBroadHistoneH1hescSuz12051317AlnRep1.bam.bai" -"wgEncodeBroadHistoneH1hescSuz12051317Pk.broadPeak.gz" -"wgEncodeBroadHistoneH1hescSuz12051317RawDataRep1.fastq.gz" 
-"wgEncodeBroadHistoneH1hescSuz12051317Sig.bigWig" -"wgEncodeBroadHistoneHelas3ControlStdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3ControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3ControlStdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3ControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3ControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3ControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3ControlStdSig.bigWig" -"wgEncodeBroadHistoneHelas3CtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3CtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3CtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3CtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3CtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3CtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3CtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3CtcfStdSig.bigWig" -"wgEncodeBroadHistoneHelas3Ezh239875AlnRep1.bam" -"wgEncodeBroadHistoneHelas3Ezh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3Ezh239875AlnRep2.bam" -"wgEncodeBroadHistoneHelas3Ezh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3Ezh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3Ezh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3Ezh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3Ezh239875Sig.bigWig" -"wgEncodeBroadHistoneHelas3H2azAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H2azAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H2azPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H2azSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k04me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k04me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k04me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k04me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k04me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k04me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k04me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k04me1StdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k09me3Sig.bigWig" -"wgEncodeBroadHistoneHelas3H3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k9acStdAlnRep1.bam.bai" 
-"wgEncodeBroadHistoneHelas3H3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k9acStdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k27acStdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneHelas3H3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneHelas3H4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3H4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3H4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3H4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3H4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3H4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3H4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3H4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneHelas3Pol2bStdAlnRep1.bam" -"wgEncodeBroadHistoneHelas3Pol2bStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHelas3Pol2bStdAlnRep2.bam" -"wgEncodeBroadHistoneHelas3Pol2bStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHelas3Pol2bStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHelas3Pol2bStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHelas3Pol2bStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHelas3Pol2bStdSig.bigWig" -"wgEncodeBroadHistoneHepg2ControlStdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2ControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2ControlStdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2ControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2ControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2ControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2ControlStdSig.bigWig" -"wgEncodeBroadHistoneHepg2CtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2CtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2CtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2CtcfStdAlnRep2.bam.bai" 
-"wgEncodeBroadHistoneHepg2CtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2CtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2CtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2CtcfStdSig.bigWig" -"wgEncodeBroadHistoneHepg2Ezh239875AlnRep1.bam" -"wgEncodeBroadHistoneHepg2Ezh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2Ezh239875AlnRep2.bam" -"wgEncodeBroadHistoneHepg2Ezh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2Ezh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2Ezh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2Ezh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2Ezh239875Sig.bigWig" -"wgEncodeBroadHistoneHepg2H2azStdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H2azStdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H2azStdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k04me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k04me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k04me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k04me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k04me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k04me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k04me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k04me1StdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k09me3Sig.bigWig" -"wgEncodeBroadHistoneHepg2H3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k9acStdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k27acStdRawDataRep2.fastq.gz" 
-"wgEncodeBroadHistoneHepg2H3k27acStdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneHepg2H3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneHepg2H4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHepg2H4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHepg2H4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHepg2H4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHepg2H4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHepg2H4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHepg2H4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHepg2H4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneHmecControlStdAlnRep1.bam" -"wgEncodeBroadHistoneHmecControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecControlStdAlnRep2.bam" -"wgEncodeBroadHistoneHmecControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecControlStdSig.bigWig" -"wgEncodeBroadHistoneHmecCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneHmecCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneHmecCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecCtcfStdSig.bigWig" -"wgEncodeBroadHistoneHmecEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneHmecEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneHmecEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneHmecEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecEzh239875Sig.bigWig" -"wgEncodeBroadHistoneHmecH2azAlnRep1.bam" -"wgEncodeBroadHistoneHmecH2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH2azAlnRep2.bam" -"wgEncodeBroadHistoneHmecH2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH2azPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH2azSig.bigWig" -"wgEncodeBroadHistoneHmecH3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k09me3AlnRep2.bam.bai" 
-"wgEncodeBroadHistoneHmecH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneHmecH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneHmecH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneHmecH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneHmecH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH3k79me2RawDataRep2.fastq.gz" 
-"wgEncodeBroadHistoneHmecH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneHmecH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHmecH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHmecH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHmecH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHmecH4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHmecH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHmecH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHmecH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneHsmmControlStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmControlStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmControlStdSig.bigWig" -"wgEncodeBroadHistoneHsmmCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmCtcfStdSig.bigWig" -"wgEncodeBroadHistoneHsmmEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneHsmmEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneHsmmEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmEzh239875Sig.bigWig" -"wgEncodeBroadHistoneHsmmH2azStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH2azStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH2azStdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k9acStdAlnRep2.bam.bai" 
-"wgEncodeBroadHistoneHsmmH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k9me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k9me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k9me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k9me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k9me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k9me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k9me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k9me3StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneHsmmH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmH4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneHsmmtControlStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtControlStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtControlStdSig.bigWig" -"wgEncodeBroadHistoneHsmmtCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtCtcfStdSig.bigWig" 
-"wgEncodeBroadHistoneHsmmtEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneHsmmtEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneHsmmtEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtEzh239875Sig.bigWig" -"wgEncodeBroadHistoneHsmmtH2azStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH2azStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH2azStdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k27me3AlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k27me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k27me3AlnRep2.bam" 
-"wgEncodeBroadHistoneHsmmtH3k27me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k27me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k27me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k27me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k27me3Sig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneHsmmtH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHsmmtH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHsmmtH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHsmmtH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHsmmtH4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHsmmtH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHsmmtH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHsmmtH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneHuvecControlStdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecControlStdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecControlStdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecControlStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecControlStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecControlStdSig.bigWig" -"wgEncodeBroadHistoneHuvecCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecCtcfStdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecCtcfStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecCtcfStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecCtcfStdSig.bigWig" -"wgEncodeBroadHistoneHuvecEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneHuvecEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneHuvecEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecEzh239875Sig.bigWig" -"wgEncodeBroadHistoneHuvecH2azAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH2azAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH2azPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH2azSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k09me3AlnRep1.bam" 
-"wgEncodeBroadHistoneHuvecH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneHuvecH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me1StdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH3k4me1StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me1StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me3StdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH3k4me3StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me3StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k9acStdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH3k9acStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k9acStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k9me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k9me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k9me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k9me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k9me1StdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH3k9me1StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH3k9me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k9me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k9me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k9me1StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k9me1StdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k27acStdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH3k27acStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH3k27acStdPk.broadPeak.gz" 
-"wgEncodeBroadHistoneHuvecH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k27acStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k36me3StdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH3k36me3StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k36me3StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneHuvecH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneHuvecH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneHuvecH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneHuvecH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecH4k20me1StdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecH4k20me1StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecH4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecH4k20me1StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneHuvecPol2bStdAlnRep1.bam" -"wgEncodeBroadHistoneHuvecPol2bStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneHuvecPol2bStdAlnRep2.bam" -"wgEncodeBroadHistoneHuvecPol2bStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneHuvecPol2bStdAlnRep3.bam" -"wgEncodeBroadHistoneHuvecPol2bStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneHuvecPol2bStdPk.broadPeak.gz" -"wgEncodeBroadHistoneHuvecPol2bStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneHuvecPol2bStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneHuvecPol2bStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneHuvecPol2bStdSig.bigWig" -"wgEncodeBroadHistoneK562Cbpsc369AlnRep1.bam" -"wgEncodeBroadHistoneK562Cbpsc369AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Cbpsc369Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Cbpsc369RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Cbpsc369Sig.bigWig" -"wgEncodeBroadHistoneK562Cbx2AlnRep1.bam" -"wgEncodeBroadHistoneK562Cbx2AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Cbx2Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Cbx2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Cbx2Sig.bigWig" -"wgEncodeBroadHistoneK562Cbx3sc101004AlnRep1.bam" -"wgEncodeBroadHistoneK562Cbx3sc101004AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Cbx3sc101004Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Cbx3sc101004RawDataRep1.fastq.gz" 
-"wgEncodeBroadHistoneK562Cbx3sc101004Sig.bigWig" -"wgEncodeBroadHistoneK562Cbx8AlnRep1.bam" -"wgEncodeBroadHistoneK562Cbx8AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Cbx8Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Cbx8RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Cbx8Sig.bigWig" -"wgEncodeBroadHistoneK562Chd1a301218aStdAlnRep1.bam" -"wgEncodeBroadHistoneK562Chd1a301218aStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Chd1a301218aStdAlnRep2.bam" -"wgEncodeBroadHistoneK562Chd1a301218aStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Chd1a301218aStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Chd1a301218aStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Chd1a301218aStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Chd1a301218aStdSig.bigWig" -"wgEncodeBroadHistoneK562Chd4mi2AlnRep1.bam" -"wgEncodeBroadHistoneK562Chd4mi2AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Chd4mi2Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Chd4mi2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Chd4mi2Sig.bigWig" -"wgEncodeBroadHistoneK562Chd7a301223a1AlnRep1.bam" -"wgEncodeBroadHistoneK562Chd7a301223a1AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Chd7a301223a1Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Chd7a301223a1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Chd7a301223a1Sig.bigWig" -"wgEncodeBroadHistoneK562ControlStdAlnRep1.bam" -"wgEncodeBroadHistoneK562ControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562ControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562ControlStdSig.bigWig" -"wgEncodeBroadHistoneK562CtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneK562CtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562CtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneK562CtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562CtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562CtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562CtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562CtcfStdSig.bigWig" -"wgEncodeBroadHistoneK562Ezh239875StdAlnRep1.bam" -"wgEncodeBroadHistoneK562Ezh239875StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Ezh239875StdAlnRep2.bam" -"wgEncodeBroadHistoneK562Ezh239875StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Ezh239875StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Ezh239875StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Ezh239875StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Ezh239875StdSig.bigWig" -"wgEncodeBroadHistoneK562H2azStdAlnRep1.bam" -"wgEncodeBroadHistoneK562H2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H2azStdAlnRep2.bam" -"wgEncodeBroadHistoneK562H2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H2azStdSig.bigWig" -"wgEncodeBroadHistoneK562H3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneK562H3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k4me2StdSig.bigWig" 
-"wgEncodeBroadHistoneK562H3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneK562H3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k9acStdSig.bigWig" -"wgEncodeBroadHistoneK562H3k9me1StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k9me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k9me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k9me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k9me1StdSig.bigWig" -"wgEncodeBroadHistoneK562H3k9me3StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k9me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k9me3StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k9me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k9me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k9me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k9me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k9me3StdSig.bigWig" -"wgEncodeBroadHistoneK562H3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k27acStdSig.bigWig" -"wgEncodeBroadHistoneK562H3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneK562H3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneK562H3k79me2StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H3k79me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H3k79me2StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H3k79me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H3k79me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H3k79me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562H3k79me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H3k79me2StdSig.bigWig" -"wgEncodeBroadHistoneK562H4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneK562H4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562H4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneK562H4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562H4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562H4k20me1StdRawDataRep1.fastq.gz" 
-"wgEncodeBroadHistoneK562H4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562H4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneK562Hdac1sc6298StdAlnRep1.bam" -"wgEncodeBroadHistoneK562Hdac1sc6298StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Hdac1sc6298StdAlnRep2.bam" -"wgEncodeBroadHistoneK562Hdac1sc6298StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Hdac1sc6298StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Hdac1sc6298StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Hdac1sc6298StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Hdac1sc6298StdSig.bigWig" -"wgEncodeBroadHistoneK562Hdac2a300705aStdAlnRep1.bam" -"wgEncodeBroadHistoneK562Hdac2a300705aStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Hdac2a300705aStdAlnRep2.bam" -"wgEncodeBroadHistoneK562Hdac2a300705aStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Hdac2a300705aStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Hdac2a300705aStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Hdac2a300705aStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Hdac2a300705aStdSig.bigWig" -"wgEncodeBroadHistoneK562Hdac6a301341aAlnRep1.bam" -"wgEncodeBroadHistoneK562Hdac6a301341aAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Hdac6a301341aAlnRep2.bam" -"wgEncodeBroadHistoneK562Hdac6a301341aAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Hdac6a301341aPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Hdac6a301341aRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Hdac6a301341aRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Hdac6a301341aSig.bigWig" -"wgEncodeBroadHistoneK562Lsd1AlnRep1.bam" -"wgEncodeBroadHistoneK562Lsd1AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Lsd1Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Lsd1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Lsd1Sig.bigWig" -"wgEncodeBroadHistoneK562NcorAlnRep1.bam" -"wgEncodeBroadHistoneK562NcorAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562NcorPk.broadPeak.gz" -"wgEncodeBroadHistoneK562NcorRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562NcorSig.bigWig" -"wgEncodeBroadHistoneK562Nsd2ab75359AlnRep1.bam" -"wgEncodeBroadHistoneK562Nsd2ab75359AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Nsd2ab75359Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Nsd2ab75359RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Nsd2ab75359Sig.bigWig" -"wgEncodeBroadHistoneK562P300StdAlnRep1.bam" -"wgEncodeBroadHistoneK562P300StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562P300StdAlnRep2.bam" -"wgEncodeBroadHistoneK562P300StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562P300StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562P300StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562P300StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562P300StdSig.bigWig" -"wgEncodeBroadHistoneK562PcafAlnRep1.bam" -"wgEncodeBroadHistoneK562PcafAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562PcafPk.broadPeak.gz" -"wgEncodeBroadHistoneK562PcafRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562PcafSig.bigWig" -"wgEncodeBroadHistoneK562Phf8a301772aStdAlnRep1.bam" -"wgEncodeBroadHistoneK562Phf8a301772aStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Phf8a301772aStdAlnRep2.bam" -"wgEncodeBroadHistoneK562Phf8a301772aStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Phf8a301772aStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Phf8a301772aStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Phf8a301772aStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Phf8a301772aStdSig.bigWig" -"wgEncodeBroadHistoneK562Plu1StdAlnRep1.bam" -"wgEncodeBroadHistoneK562Plu1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Plu1StdAlnRep2.bam" -"wgEncodeBroadHistoneK562Plu1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Plu1StdPk.broadPeak.gz" 
-"wgEncodeBroadHistoneK562Plu1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Plu1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Plu1StdSig.bigWig" -"wgEncodeBroadHistoneK562Pol2bStdAlnRep1.bam" -"wgEncodeBroadHistoneK562Pol2bStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Pol2bStdAlnRep2.bam" -"wgEncodeBroadHistoneK562Pol2bStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Pol2bStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Pol2bStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Pol2bStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Pol2bStdSig.bigWig" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdAlnRep1.bam" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdAlnRep2.bam" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Rbbp5a300109aStdSig.bigWig" -"wgEncodeBroadHistoneK562RestAlnRep1.bam" -"wgEncodeBroadHistoneK562RestAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562RestPk.broadPeak.gz" -"wgEncodeBroadHistoneK562RestRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562RestSig.bigWig" -"wgEncodeBroadHistoneK562Rnf2AlnRep1.bam" -"wgEncodeBroadHistoneK562Rnf2AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Rnf2Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Rnf2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Rnf2Sig.bigWig" -"wgEncodeBroadHistoneK562Sap3039731StdAlnRep1.bam" -"wgEncodeBroadHistoneK562Sap3039731StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Sap3039731StdAlnRep2.bam" -"wgEncodeBroadHistoneK562Sap3039731StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneK562Sap3039731StdPk.broadPeak.gz" -"wgEncodeBroadHistoneK562Sap3039731StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Sap3039731StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneK562Sap3039731StdSig.bigWig" -"wgEncodeBroadHistoneK562Setdb1AlnRep1.bam" -"wgEncodeBroadHistoneK562Setdb1AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Setdb1Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Setdb1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Setdb1Sig.bigWig" -"wgEncodeBroadHistoneK562Sirt6AlnRep1.bam" -"wgEncodeBroadHistoneK562Sirt6AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Sirt6Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Sirt6RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Sirt6Sig.bigWig" -"wgEncodeBroadHistoneK562Suz12051317AlnRep1.bam" -"wgEncodeBroadHistoneK562Suz12051317AlnRep1.bam.bai" -"wgEncodeBroadHistoneK562Suz12051317Pk.broadPeak.gz" -"wgEncodeBroadHistoneK562Suz12051317RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneK562Suz12051317Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746ControlAlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746ControlAlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746ControlAlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746ControlAlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746ControlRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746ControlRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746ControlSig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746CtcfAlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746CtcfAlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746CtcfAlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746CtcfAlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746CtcfPk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746CtcfRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746CtcfRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746CtcfSig.bigWig" 
-"wgEncodeBroadHistoneMonocd14ro1746H2azAlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H2azAlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H2azPk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H2azSig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me1Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me2Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k04me3Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acAlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acAlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acAlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acAlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acPk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k09acSig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k09me3Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acAlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acAlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acAlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acAlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acPk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k27acSig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3AlnRep2.bam" 
-"wgEncodeBroadHistoneMonocd14ro1746H3k27me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k27me3Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k36me3Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H3k79me2Sig.bigWig" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1AlnRep1.bam" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1AlnRep2.bam" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneMonocd14ro1746H4k20me1Sig.bigWig" -"wgEncodeBroadHistoneNhaControlStdAlnRep1.bam" -"wgEncodeBroadHistoneNhaControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaControlStdAlnRep2.bam" -"wgEncodeBroadHistoneNhaControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaControlStdSig.bigWig" -"wgEncodeBroadHistoneNhaCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneNhaCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneNhaCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaCtcfStdSig.bigWig" -"wgEncodeBroadHistoneNhaEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneNhaEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneNhaEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhaEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaEzh239875Sig.bigWig" -"wgEncodeBroadHistoneNhaH2azAlnRep1.bam" -"wgEncodeBroadHistoneNhaH2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH2azAlnRep2.bam" -"wgEncodeBroadHistoneNhaH2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH2azPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH2azSig.bigWig" -"wgEncodeBroadHistoneNhaH3k04me2AlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k04me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k04me2AlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k04me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k04me2Pk.broadPeak.gz" 
-"wgEncodeBroadHistoneNhaH3k04me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k04me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k04me2Sig.bigWig" -"wgEncodeBroadHistoneNhaH3k09acAlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k09acAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k09acAlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k09acAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k09acPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k09acRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k09acRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k09acSig.bigWig" -"wgEncodeBroadHistoneNhaH3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneNhaH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneNhaH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneNhaH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneNhaH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneNhaH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneNhaH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneNhaH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhaH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneNhaH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneNhaH4k20me1AlnRep1.bam" -"wgEncodeBroadHistoneNhaH4k20me1AlnRep1.bam.bai" 
-"wgEncodeBroadHistoneNhaH4k20me1AlnRep2.bam" -"wgEncodeBroadHistoneNhaH4k20me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhaH4k20me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhaH4k20me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhaH4k20me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhaH4k20me1Sig.bigWig" -"wgEncodeBroadHistoneNhdfadControlStdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadControlStdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadControlStdSig.bigWig" -"wgEncodeBroadHistoneNhdfadCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadCtcfStdSig.bigWig" -"wgEncodeBroadHistoneNhdfadEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneNhdfadEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneNhdfadEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadEzh239875Sig.bigWig" -"wgEncodeBroadHistoneNhdfadH2azAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH2azAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH2azPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH2azSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k04me1AlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k04me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k04me1AlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k04me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k04me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k04me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k04me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k04me1Sig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k4me3StdRawDataRep1.fastq.gz" 
-"wgEncodeBroadHistoneNhdfadH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneNhdfadH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneNhdfadH4k20me1AlnRep1.bam" -"wgEncodeBroadHistoneNhdfadH4k20me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhdfadH4k20me1AlnRep2.bam" -"wgEncodeBroadHistoneNhdfadH4k20me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhdfadH4k20me1AlnRep3.bam" -"wgEncodeBroadHistoneNhdfadH4k20me1AlnRep3.bam.bai" -"wgEncodeBroadHistoneNhdfadH4k20me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhdfadH4k20me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhdfadH4k20me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhdfadH4k20me1RawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhdfadH4k20me1Sig.bigWig" -"wgEncodeBroadHistoneNhekControlStdAlnRep1.bam" -"wgEncodeBroadHistoneNhekControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekControlStdAlnRep2.bam" -"wgEncodeBroadHistoneNhekControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekControlStdSig.bigWig" -"wgEncodeBroadHistoneNhekCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneNhekCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneNhekCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekCtcfStdAlnRep3.bam" -"wgEncodeBroadHistoneNhekCtcfStdAlnRep3.bam.bai" 
-"wgEncodeBroadHistoneNhekCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekCtcfStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekCtcfStdSig.bigWig" -"wgEncodeBroadHistoneNhekEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneNhekEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneNhekEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhekEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekEzh239875Sig.bigWig" -"wgEncodeBroadHistoneNhekH2azAlnRep1.bam" -"wgEncodeBroadHistoneNhekH2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH2azAlnRep2.bam" -"wgEncodeBroadHistoneNhekH2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH2azPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH2azSig.bigWig" -"wgEncodeBroadHistoneNhekH3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneNhekH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me1StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k4me1StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me1StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me2StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k4me2StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me2StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me3StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k4me3StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k4me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me3StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k9acStdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k9acStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k9acStdPk.broadPeak.gz" 
-"wgEncodeBroadHistoneNhekH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k9acStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k9me1StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k9me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k9me1StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k9me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k9me1StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k9me1StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k9me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k9me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k9me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k9me1StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k9me1StdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k27acStdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k27acStdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k27acStdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k27me3StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k27me3StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k27me3StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k36me3StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH3k36me3StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k36me3StdRawDataRep3.fastq.gz" -"wgEncodeBroadHistoneNhekH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneNhekH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneNhekH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneNhekH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneNhekH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneNhekH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneNhekH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekH4k20me1StdAlnRep3.bam" -"wgEncodeBroadHistoneNhekH4k20me1StdAlnRep3.bam.bai" -"wgEncodeBroadHistoneNhekH4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekH4k20me1StdRawDataRep3.fastq.gz" 
-"wgEncodeBroadHistoneNhekH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneNhekPol2bStdAlnRep1.bam" -"wgEncodeBroadHistoneNhekPol2bStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhekPol2bStdAlnRep2.bam" -"wgEncodeBroadHistoneNhekPol2bStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhekPol2bStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhekPol2bStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhekPol2bStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhekPol2bStdSig.bigWig" -"wgEncodeBroadHistoneNhlfControlStdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfControlStdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfControlStdSig.bigWig" -"wgEncodeBroadHistoneNhlfCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfCtcfStdSig.bigWig" -"wgEncodeBroadHistoneNhlfEzh239875AlnRep1.bam" -"wgEncodeBroadHistoneNhlfEzh239875AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfEzh239875AlnRep2.bam" -"wgEncodeBroadHistoneNhlfEzh239875AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfEzh239875Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfEzh239875RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfEzh239875RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfEzh239875Sig.bigWig" -"wgEncodeBroadHistoneNhlfH2azAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH2azAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH2azAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH2azAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH2azPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH2azRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH2azRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH2azSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k09me3AlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k09me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k09me3AlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k09me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k09me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k09me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k09me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k09me3Sig.bigWig" -"wgEncodeBroadHistoneNhlfH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k4me2StdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k4me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k4me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k4me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k4me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k4me3StdPk.broadPeak.gz" 
-"wgEncodeBroadHistoneNhlfH3k4me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k4me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k4me3StdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k9acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k9acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k9acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k9acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k9acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k9acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k9acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k9acStdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k27me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k27me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k27me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k27me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k27me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k27me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k27me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k27me3StdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k36me3StdSig.bigWig" -"wgEncodeBroadHistoneNhlfH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneNhlfH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneNhlfH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneNhlfH4k20me1StdAlnRep1.bam" -"wgEncodeBroadHistoneNhlfH4k20me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneNhlfH4k20me1StdAlnRep2.bam" -"wgEncodeBroadHistoneNhlfH4k20me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneNhlfH4k20me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneNhlfH4k20me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneNhlfH4k20me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneNhlfH4k20me1StdSig.bigWig" -"wgEncodeBroadHistoneOsteoH3k04me3AlnRep1.bam" -"wgEncodeBroadHistoneOsteoH3k04me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoH3k04me3AlnRep2.bam" -"wgEncodeBroadHistoneOsteoH3k04me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoH3k04me3AlnRep3.bam" -"wgEncodeBroadHistoneOsteoH3k04me3AlnRep3.bam.bai" -"wgEncodeBroadHistoneOsteoH3k04me3Pk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoH3k04me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k04me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k04me3RawDataRep3.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k04me3Sig.bigWig" -"wgEncodeBroadHistoneOsteoH3k27me3AlnRep1.bam" -"wgEncodeBroadHistoneOsteoH3k27me3AlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoH3k27me3AlnRep2.bam" -"wgEncodeBroadHistoneOsteoH3k27me3AlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoH3k27me3Pk.broadPeak.gz" 
-"wgEncodeBroadHistoneOsteoH3k27me3RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k27me3RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k27me3Sig.bigWig" -"wgEncodeBroadHistoneOsteoH3k79me2AlnRep1.bam" -"wgEncodeBroadHistoneOsteoH3k79me2AlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoH3k79me2AlnRep2.bam" -"wgEncodeBroadHistoneOsteoH3k79me2AlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoH3k79me2Pk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoH3k79me2RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k79me2RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoH3k79me2Sig.bigWig" -"wgEncodeBroadHistoneOsteoH4k20me1AlnRep1.bam" -"wgEncodeBroadHistoneOsteoH4k20me1AlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoH4k20me1AlnRep2.bam" -"wgEncodeBroadHistoneOsteoH4k20me1AlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoH4k20me1Pk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoH4k20me1RawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoH4k20me1RawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoH4k20me1Sig.bigWig" -"wgEncodeBroadHistoneOsteoP300kat3bAlnRep1.bam" -"wgEncodeBroadHistoneOsteoP300kat3bAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoP300kat3bAlnRep2.bam" -"wgEncodeBroadHistoneOsteoP300kat3bAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoP300kat3bPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoP300kat3bRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoP300kat3bRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoP300kat3bSig.bigWig" -"wgEncodeBroadHistoneOsteoblControlStdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblControlStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblControlStdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblControlStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblControlStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblControlStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblControlStdSig.bigWig" -"wgEncodeBroadHistoneOsteoblCtcfStdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblCtcfStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblCtcfStdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblCtcfStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblCtcfStdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblCtcfStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblCtcfStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblCtcfStdSig.bigWig" -"wgEncodeBroadHistoneOsteoblH2azStdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblH2azStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblH2azStdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblH2azStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblH2azStdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblH2azStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblH2azStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblH2azStdSig.bigWig" -"wgEncodeBroadHistoneOsteoblH3k4me1StdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblH3k4me1StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k4me1StdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblH3k4me1StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k4me1StdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblH3k4me1StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k4me1StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k4me1StdSig.bigWig" -"wgEncodeBroadHistoneOsteoblH3k4me2StdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblH3k4me2StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k4me2StdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblH3k4me2StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k4me2StdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblH3k4me2StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k4me2StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k4me2StdSig.bigWig" 
-"wgEncodeBroadHistoneOsteoblH3k9me3StdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblH3k9me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k9me3StdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblH3k9me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k9me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblH3k9me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k9me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k9me3StdSig.bigWig" -"wgEncodeBroadHistoneOsteoblH3k27acStdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblH3k27acStdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k27acStdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblH3k27acStdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k27acStdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblH3k27acStdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k27acStdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k27acStdSig.bigWig" -"wgEncodeBroadHistoneOsteoblH3k36me3StdAlnRep1.bam" -"wgEncodeBroadHistoneOsteoblH3k36me3StdAlnRep1.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k36me3StdAlnRep2.bam" -"wgEncodeBroadHistoneOsteoblH3k36me3StdAlnRep2.bam.bai" -"wgEncodeBroadHistoneOsteoblH3k36me3StdPk.broadPeak.gz" -"wgEncodeBroadHistoneOsteoblH3k36me3StdRawDataRep1.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k36me3StdRawDataRep2.fastq.gz" -"wgEncodeBroadHistoneOsteoblH3k36me3StdSig.bigWig" \ No newline at end of file From f0ad7f33a1befb4c7c1ccbaac655013a37a84bba Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Mon, 19 Sep 2016 11:54:27 -0700 Subject: [PATCH 46/56] strip dead dir --- ENCODE-example/batch.py | 51 ----------------------------------------- 1 file changed, 51 deletions(-) delete mode 100644 ENCODE-example/batch.py diff --git a/ENCODE-example/batch.py b/ENCODE-example/batch.py deleted file mode 100644 index d210aa8..0000000 --- a/ENCODE-example/batch.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/python -import getpass -import os -import re -import shlex -import sys -import time - -import dataset - -ENDPOINT = 'https://quilt-heroku.herokuapp.com/' -CORE_TAGS = ['Human', 'ENCODE', 'ChIPseq', 'hg19', 'Histone', 'BroadPeak'] -CORE_DESC = 'Sources\nhttps://www.encodeproject.org/\nhttp://hgdownload.cse.ucsc.edu/goldenPath/hg19/encodeDCC/wgEncodeBroadHistone/' -PUBLIC = True -WAIT = 20 #time (in seconds) to wait between uploads - -def process(argv): - if len(argv) != 2: - sys.stderr.write("Usage: %s \n" % argv[0]) - return 1 - - #password - passwd = getpass.getpass() - fname = argv[1] - #process line by line - lines = [line.strip() for line in open(fname)] - for l in lines: - name = os.path.splitext(l)[0] - pretags = re.findall('[A-Z][^A-Z]*', name) - #pull out tags already covered by core tags - pretags.remove('Encode') - pretags.remove('Broad') - pretags.remove('Histone') - pretags.remove('Pk') - #nice name - name = 'ENCODE ChIP-seq: ' + ' '.join(pretags) - tags = map(lambda x: '#' + x, CORE_TAGS + pretags) - description = ' '.join(tags) + '\n' + CORE_DESC - args = "-u USERNAME -n '%s' -d '%s' -f downloads/%s -p '%s' -x '%s'" % (name, description, l, PUBLIC, passwd) - argv = shlex.split(args) - #create data set on Quilt - dataset.create(argv) - print '%s' % (l) - #don't hammer - time.sleep(WAIT) - - return 1 - - -if __name__ == '__main__': - sys.exit(process(sys.argv)) From f445cb03dbe7450953cfe0910ee737baac51b449 Mon Sep 17 00:00:00 2001 From: Aneesh Karve Date: Mon, 19 Sep 2016 13:43:53 -0700 Subject: [PATCH 47/56] encrypted test user credentials --- .travis.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.travis.yml 
From f445cb03dbe7450953cfe0910ee737baac51b449 Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Mon, 19 Sep 2016 13:43:53 -0700
Subject: [PATCH 47/56] encrypted test user credentials

---
 .travis.yml | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 723f097..291a556 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,14 +1,12 @@
 language: python
 python:
 - '2.7'
-
 env:
   global:
-  - secure: CUtSldmJk7y2PpimxpOWKNi30Ve/pQuzSrGxQyvBE3ny8nTFjzIK+DUcqCVgNDmCqm3AAkE/DsMyCgRCxTpXGW35mhEyCCUlqPlULgGBoBdx5KtMbgOHq+W/+R1utrmliS5N6GexcydAMQ/CaepKdpeyf0fgpLFes6J1WYG/2JGErHS5Vvbprv9E8UMuG7WoSJXtvSK6Cnz06wWVfGkvw5nLbftuCTfKgCzXU5CK6lFUQlDqJwC6bqsuuGCiWI34OnDmbThHZkNG7JfEapIdeWMoHR93LodviShuiGxhpS/WDewGEwzHXmys6oo6Z9/kLJdcJOCmxdFvYWQUGNxDwOSSFpOjKNXIUR4n6rfqOwarPzRGWQd4C7NLKjkiMymBk1qb016+wlGsicFBktPNV5O9KLL6aytRP34Q30yuAhqcQufVw37G3CdsEKEO6rXghIG81RP2AH64fC12TG29gnQmZQTW1Yv99J61X1+jsCAVVcx1rISO5JYRvMWCBuJK0Arpsjj0PE41/3yUT72TgPRZGeh1wDOvl6SCGNgo5xYoMzdhsoCxm//9rYTW/3HmEUtB59+9M9E9+IZnaT+vN+sWOAElPDEsgu1Kn90GIADfQ/wL8B0pstrVrL2zZSrqCyJxlxE2frq4yNJo74hMizE+QTy2fKumtljLN2x7WZM=
+  - secure: H4RMbyq7lTQlJsOaH/VBv9qW+DiahWx7GrPuTUHqidbfV9k8oFSp44SIsMrvgmuiRUOmdjeeUng0FFOfaKN5fZ3djp4ZJNHFN/m1ClDEoNidi5767OJKgK42V2DHbqEeFl2K+ktx7lsb3fB4+bVVFEU+IouyTLqNdNBEO6hhjd4i10XGm+0lwqvDdXwtOG6q6B86X1W+37wfpBa+CVZZZBVTXTxEa9WsBEpJgtpBwUddvszC2OvUwu+b2zMbwWmGUPU1Xly0HxdmyhQ9UOcHNYQLptO0kVRm+impcwuRxwOwhEi/LICFvJxz+bh7U1VEf3Si+mWN6i82eHbS532/F0cHNwHEQPA1+xJn0MPmh/y1OHXlKFOKy/gaqSAnz1gJ419PBcBIOjTyQp1iie20l+/Ap29UzRN6HRDyOW/jDG8hZALtKC99edYXspAfDFDedDUcN5bGN8KrNtfq7Lsdc8sN1+/9ioiHkGjUZCMWq13aHcmwcZKjf3W+l69oRO/Cx10tjZTZGtBmlxWtcHj4X1QSaF5u42YCgcxXPP8P3gKHT07TR+LgBZfwMZ2yJBql36p9dbkqDf4ZYbLIycKGM22i8Ex1EmZn6x9rLFPWKs8M8AFm29cYkxuL0Ke4ZKi3iuPgnwIiHlG1blXxzEfc3FVA2nE+YDh+yKo2ZNa+RhQ=
   matrix:
-  - QUILT_URL="https://quiltdata.com"
-  - QUILT_URL="https://quilt-heroku.herokuapp.com"
-
+  - QUILT_URL="https://quiltdata.com"
+  - QUILT_URL="https://quilt-heroku.herokuapp.com"
 before_install: pip install pytest pytest-cov
 install: pip install -r requirements.txt
 script: python -m pytest --cov=quilt/ tests

From ebbbff1c157b197c2c67a470f839cf43d81468e2 Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Mon, 19 Sep 2016 13:44:23 -0700
Subject: [PATCH 48/56] update repo path to new name in URLs

---
 setup.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 736bc1e..86f38ee 100644
--- a/setup.py
+++ b/setup.py
@@ -24,8 +24,8 @@ def readme():
     author='quiltdata',
     author_email='founders@quiltdata.io',
     license='LICENSE.txt',
-    url='https://github.com/quiltdata/API',
-    download_url='https://github.com/quiltdata/API/tarball/0.1.6',
+    url='https://github.com/quiltdata/python-api',
+    download_url='https://github.com/quiltdata/python-api/tarball/0.1.6',
     keywords='quiltdata api social shareable data platform',
     install_requires=[
         'requests==2.11.1',
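Patches 47–48 are housekeeping: the Travis config gains an encrypted `secure:` credential while keeping its two-endpoint build matrix, and the packaging metadata follows the repository rename to `python-api`. Each matrix entry spawns a separate Travis job with that `QUILT_URL` exported, so the suite runs once against production and once against staging. A sketch of how a test module might pick up the per-job endpoint (the local-run fallback is an assumption, not something the config specifies):

```python
# Each Travis matrix entry exports QUILT_URL for its job; fall back to
# the production endpoint for local runs (fallback choice assumed).
import os

QUILT_URL = os.environ.get('QUILT_URL', 'https://quiltdata.com')
print('running tests against %s' % QUILT_URL)
```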
From 8dac86b373c2fd30099776c266824236ad8d1ff5 Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Mon, 19 Sep 2016 14:24:54 -0700
Subject: [PATCH 49/56] correct encrypted credentials

---
 .travis.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index 291a556..6944078 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,7 @@ python:
 - '2.7'
 env:
   global:
-  - secure: H4RMbyq7lTQlJsOaH/VBv9qW+DiahWx7GrPuTUHqidbfV9k8oFSp44SIsMrvgmuiRUOmdjeeUng0FFOfaKN5fZ3djp4ZJNHFN/m1ClDEoNidi5767OJKgK42V2DHbqEeFl2K+ktx7lsb3fB4+bVVFEU+IouyTLqNdNBEO6hhjd4i10XGm+0lwqvDdXwtOG6q6B86X1W+37wfpBa+CVZZZBVTXTxEa9WsBEpJgtpBwUddvszC2OvUwu+b2zMbwWmGUPU1Xly0HxdmyhQ9UOcHNYQLptO0kVRm+impcwuRxwOwhEi/LICFvJxz+bh7U1VEf3Si+mWN6i82eHbS532/F0cHNwHEQPA1+xJn0MPmh/y1OHXlKFOKy/gaqSAnz1gJ419PBcBIOjTyQp1iie20l+/Ap29UzRN6HRDyOW/jDG8hZALtKC99edYXspAfDFDedDUcN5bGN8KrNtfq7Lsdc8sN1+/9ioiHkGjUZCMWq13aHcmwcZKjf3W+l69oRO/Cx10tjZTZGtBmlxWtcHj4X1QSaF5u42YCgcxXPP8P3gKHT07TR+LgBZfwMZ2yJBql36p9dbkqDf4ZYbLIycKGM22i8Ex1EmZn6x9rLFPWKs8M8AFm29cYkxuL0Ke4ZKi3iuPgnwIiHlG1blXxzEfc3FVA2nE+YDh+yKo2ZNa+RhQ=
+  secure: H2dD3a0D/0Ky53jHvINmo9bVHrBH3SLkfJaGoGeUprYSptoEG9ZdL4IqAGpKWJJYRvoOd84QjTEAVPcQqaUGwckJnj0uUxEXDiNv0OdyvMPr8csbhz44FWUquYM8hPrmBoJffsnSosgF6nmH0V1BqFoDfVJ4Ihy5Rabe62UuQwr1/S1XkbLt8YwnhVfcVMJ04VdgfVvi6NdblH7RDMafYf8K65p/JjQh48cVCkfukpY/qWELAU4Hz9BBzYUQmXbwyhCeRGG4Knh/pAZE6o9t4xARyuAs1gKbf6SMounDYOnI8m9GzuhHmxh7WUc7PkZfcSrnQcs8WIDuhszmIZRqIotajic1snyrhn3H1A0ayoL1GGPpo10L/ctOhNpeP/u4DwGrFV5lJqLXcRAze0p2+8IvDcN1cUo5f5+rQglCk5qNHyDDMQzZYdbgaMdCBpWJkB4Rodtqy4Zg/Fxm3vvsJwg4WsjeK3YoBLmCq9uZShukPK+mHwgo+UPuq/tI2otZSPPaxvIbLfe2FmHjPMp7sCpUrKEVvXKkPM5ju/7RFV2AA5AgSnV7l+6kwYeKAS0VXxZ7Tyzrl0A7P8zpMHKIB95ptkneu1g0Kdtn/cUBbFUJAbN2S2sILMKTcYXW6yBCDQm2pxdmQ6I+Xs0z15WgWlFCvloc8kE+hhaTca7M7h0=
   matrix:
   - QUILT_URL="https://quiltdata.com"
   - QUILT_URL="https://quilt-heroku.herokuapp.com"

From 00adb9461af560d7d8ef1ed878401420b829677b Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Mon, 19 Sep 2016 14:46:41 -0700
Subject: [PATCH 50/56] try encrypted test user credentials as separate add commands FOO=BAR

---
 .travis.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index 6944078..2dc60a2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,8 @@ python:
 - '2.7'
 env:
   global:
-  secure: H2dD3a0D/0Ky53jHvINmo9bVHrBH3SLkfJaGoGeUprYSptoEG9ZdL4IqAGpKWJJYRvoOd84QjTEAVPcQqaUGwckJnj0uUxEXDiNv0OdyvMPr8csbhz44FWUquYM8hPrmBoJffsnSosgF6nmH0V1BqFoDfVJ4Ihy5Rabe62UuQwr1/S1XkbLt8YwnhVfcVMJ04VdgfVvi6NdblH7RDMafYf8K65p/JjQh48cVCkfukpY/qWELAU4Hz9BBzYUQmXbwyhCeRGG4Knh/pAZE6o9t4xARyuAs1gKbf6SMounDYOnI8m9GzuhHmxh7WUc7PkZfcSrnQcs8WIDuhszmIZRqIotajic1snyrhn3H1A0ayoL1GGPpo10L/ctOhNpeP/u4DwGrFV5lJqLXcRAze0p2+8IvDcN1cUo5f5+rQglCk5qNHyDDMQzZYdbgaMdCBpWJkB4Rodtqy4Zg/Fxm3vvsJwg4WsjeK3YoBLmCq9uZShukPK+mHwgo+UPuq/tI2otZSPPaxvIbLfe2FmHjPMp7sCpUrKEVvXKkPM5ju/7RFV2AA5AgSnV7l+6kwYeKAS0VXxZ7Tyzrl0A7P8zpMHKIB95ptkneu1g0Kdtn/cUBbFUJAbN2S2sILMKTcYXW6yBCDQm2pxdmQ6I+Xs0z15WgWlFCvloc8kE+hhaTca7M7h0=
+  - secure: q+T0bXPd6uluG6rpz+c110teJBU0+6vH7SaK0KTux4DrKC2hbD3JO//f7i2qKrGnccGx9mRf6EcNXzGK9wLk3Wxp5xh9cw2GfvYf2S2Uehn7k2rnmUXt0DCehqM71BfWWlyPCYiy1dsy8QrfhVkm8rZUrswlo8vZkcmlxcZfMyEgan1l6YB0Xw1kud9fQjLhony3iblYEEkU/dRg4oyWKHGvirw+T+Ou9/avxhcrU+Oyp/f4qCDkScsO2c1X1JM6zXQlO73PlAxO72J9zAGiqNLvq/k/ZTVwhhjsxE49pfKuyLEYI8McgmDcvMVgzrikZa8zmFRsJz+bTGUvknSzguz/InLQjk9m2Bcf0bMr1iyL3033ccg5DfnlO6/UUzm/IwThRGzuks5v/ugqidyrWDal94nzv/UoLOGDXxVfW1vUvtExaRqtQ0Rd+Ae4cNzfXGDHNjCUSupeUW8Nh+q7VA2Py9WWdrvnEFnpxZCmQFSp4vVNzjETOZJGc5H7LTCQDHe7zxZujC3FBuIZqjkq6KB6rlC2ETxZw/K8b50B4N+InGdaWwG9ct8r61CC5a1lulNhOSI5ReO0u8tYBDHxTMWQNv8R0q4CrkBkMCFkbmQDUa1YcpXe63bhEiA4yzpgjmRHSJT/Yce49chpDBOM2FBaqRfQdH3hbHTT7RxXw8o=
+  - secure: w7NicXugzw1LNLYG8twjqN8nCHOTYYvIzCb3ZHA0XTK2Y0elC7JUmqkV6/SrJaHjyqwmbvIcz2eGyS0gpcTgC8g1lLJUdsXm+EZ8W3w4+960nIkU96w2BGwI4wvFwgvkZ2SjtLRYzYHMtVMJ+ltzrp8SZv24YHkhihRxgIWMhjo2k12JYz8co/jXFnWPeQQ7o18K6EiEBN5obnkQtnh84xaApTcQIdz5k4mCejPrBuHxnH8ye0oy/hyhKu3VHaBzqnB0K8DzmMZEvck4sXdaRxPU0W+LcgRjAo5FGop2s2vAhmSJHn/OG4tdNBI058rHlXSM9OVnUpJdssEw/6eKNeruynsj5CQ2NfElCem+RTSf5w5ZsN9u3x9P6fKsvSfOBQ1/YrOOe3s/pQYCs0cXlYYxJe4aRrJJkOtdFbqQhIQuU4oLUtVSXs8GUQ1TGpPAJn5wJqtTlbondPxnmDFQpR62ouyEQazx+NNIK5yFIUD/vUzxU52hk6KoPk4aiJeQN0Kgqz5F+H3Cj+Hy0sGVpLnv3rCCLWaUz3lK7oXbCHtf+4ZOYK99y73Gv/4oX3lrFB4ms6Dgh6HOVYnCSkFshHAhigSukrKFKhu9PAbIzsg62hl3DXSTWpt3b4Yt6uzF+gWO2Oj4SatbcPd/9/UPVJkiV4TejldbLRb5C4Z9YqA=
   matrix:
   - QUILT_URL="https://quiltdata.com"
   - QUILT_URL="https://quilt-heroku.herokuapp.com"
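Patches 49–50 wrestle with the `secure:` syntax: each entry under `global:` must be its own list item because every entry decrypts into exactly one `KEY=value` export on trusted builds, which is what the "separate add commands" in the commit message refers to. The decrypted variable names never appear in the config, so the sketch below invents `QUILT_TEST_USER`/`QUILT_TEST_PASSWORD` purely for illustration:

```python
# Hypothetical variable names -- the real ones are sealed inside the
# `secure:` ciphertext and only exist on trusted Travis builds.
import getpass
import os

user = os.environ.get('QUILT_TEST_USER')
password = os.environ.get('QUILT_TEST_PASSWORD')
if not (user and password):
    # Forked PRs and local checkouts never see decrypted secrets,
    # hence an interactive fallback (Python 2, like the CI config).
    user = raw_input('Quilt username: ')
    password = getpass.getpass()
```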
From d7b68d3ad83a0575cd50f1e4c43250ae43bdc207 Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Mon, 19 Sep 2016 15:19:11 -0700
Subject: [PATCH 51/56] pip caching

---
 .travis.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.travis.yml b/.travis.yml
index 2dc60a2..884fd6e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,4 +1,5 @@
 language: python
+cache: pip
 python:
 - '2.7'
 env:

From 9a824ad7cb252b2f3670014400b3ab24a49e4f9e Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Mon, 19 Sep 2016 15:55:21 -0700
Subject: [PATCH 52/56] add list length asserts

---
 tests/test_tables.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/test_tables.py b/tests/test_tables.py
index 9669023..145487b 100644
--- a/tests/test_tables.py
+++ b/tests/test_tables.py
@@ -54,6 +54,8 @@ def test_create_table_from_file():
     assert ['col1', 'col2'] == [col.name for col in t1.columns]
     fields = [col.field for col in t1.columns]
     rows = [row for row in t1]
+    assert(len(rows) == 2)
+    assert(len(fields) == 2)
     assert rows[0][fields[0]] == 1
     assert rows[0][fields[1]] == 'a'
     assert rows[1][fields[0]] == 2

From 883ee6128cc074d9490a65e3407f165baa249ff1 Mon Sep 17 00:00:00 2001
From: brennv
Date: Mon, 19 Sep 2016 16:05:46 -0700
Subject: [PATCH 53/56] test sleep and verbose tests

---
 .travis.yml          | 2 +-
 tests/test_tables.py | 6 ++++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 884fd6e..a8fd5d4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,5 +11,5 @@ env:
   - QUILT_URL="https://quilt-heroku.herokuapp.com"
 before_install: pip install pytest pytest-cov
 install: pip install -r requirements.txt
-script: python -m pytest --cov=quilt/ tests
+script: python -m pytest --cov=quilt/ tests -v
 after_success: bash <(curl -s https://codecov.io/bash)
diff --git a/tests/test_tables.py b/tests/test_tables.py
index 145487b..70f87c2 100644
--- a/tests/test_tables.py
+++ b/tests/test_tables.py
@@ -1,5 +1,6 @@
 import os
 from random import randint
+import time

 from quilt import Connection
 import pandas as pd
@@ -51,11 +52,12 @@ def test_create_table_from_file():
     test_description = 'test file ' + test_name
     t1 = conn.create_table(name=test_name, description=test_description,
                            inputfile=test_file)
+    time.sleep(10)
     assert ['col1', 'col2'] == [col.name for col in t1.columns]
     fields = [col.field for col in t1.columns]
     rows = [row for row in t1]
-    assert(len(rows) == 2)
-    assert(len(fields) == 2)
+    assert len(rows) == 2
+    assert len(fields) == 2
     assert rows[0][fields[0]] == 1
     assert rows[0][fields[1]] == 'a'
     assert rows[1][fields[0]] == 2
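Patches 51–53 speed up and stabilize CI: pip's cache is reused across builds, the new length asserts catch truncated uploads before the row-value asserts run, and a `time.sleep(10)` gives the server time to ingest the uploaded file before the test reads it back (patch 55 below cuts this to one second). A fixed sleep is a race either way; a bounded poll is the sturdier pattern. A sketch, under the assumption that iterating the table re-queries the server as the test does:

```python
# Sketch: poll for the expected row count instead of sleeping a fixed
# interval. Assumes iterating `table` issues a fresh server query.
import time

def wait_for_rows(table, expected, timeout=30, interval=1):
    deadline = time.time() + timeout
    while time.time() < deadline:
        rows = [row for row in table]
        if len(rows) == expected:
            return rows
        time.sleep(interval)
    raise AssertionError('expected %d rows before timeout' % expected)
```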
From 4322f14cb5bd2815b0d6b3d5ed38b245ac44a59d Mon Sep 17 00:00:00 2001
From: Quilt Data
Date: Mon, 19 Sep 2016 16:27:48 -0700
Subject: [PATCH 54/56] Update README.rst

add travis status
---
 README.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.rst b/README.rst
index b2637b6..0de94b4 100644
--- a/README.rst
+++ b/README.rst
@@ -1,3 +1,5 @@
+.. image:: https://travis-ci.org/quiltdata/python-api.svg?branch=master
+   :target: https://travis-ci.org/quiltdata/python-api
 Python
 ======

From 3df9d8e1fc0335a6fb1f38d7bbead215c9f4b70d Mon Sep 17 00:00:00 2001
From: brennv
Date: Mon, 19 Sep 2016 17:23:42 -0700
Subject: [PATCH 55/56] shorten sleep

---
 tests/test_tables.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_tables.py b/tests/test_tables.py
index 70f87c2..1537398 100644
--- a/tests/test_tables.py
+++ b/tests/test_tables.py
@@ -52,7 +52,7 @@ def test_create_table_from_file():
     test_description = 'test file ' + test_name
     t1 = conn.create_table(name=test_name, description=test_description,
                            inputfile=test_file)
-    time.sleep(10)
+    time.sleep(1)
     assert ['col1', 'col2'] == [col.name for col in t1.columns]
     fields = [col.field for col in t1.columns]
     rows = [row for row in t1]

From 2dca6883e626bf8eacc8218ee51007eff378d12e Mon Sep 17 00:00:00 2001
From: Aneesh Karve
Date: Wed, 21 Sep 2016 13:06:56 -0700
Subject: [PATCH 56/56] strip dead doc lines

---
 README.rst | 39 ---------------------------------------
 1 file changed, 39 deletions(-)

diff --git a/README.rst b/README.rst
index 0de94b4..a55e9fe 100644
--- a/README.rst
+++ b/README.rst
@@ -196,45 +196,6 @@ and subtracted.
 
     result = tableA.intersect_wao(tableB)
     result = tableA.subtract(tableB)
 
-Convert files to live data sets on Quilt
-========================================
-
-Optional prep (your steps may vary)
------------------------------------
-
-#. Get a list of files you want to upload (see ``get-files-to-upload/``)
-#. Download the files in the list (see ``curl-all.py``)
-#. Unzip downloaded files (if needed)
-
-   .. code:: bash
-
-       cd downloads
-       gunzip *.gz
-
-#. | Use ``data_set.py`` to create data sets on Quilt (see
-   ``python data_set.py --help``).
-   | You will need a Quilt username and password. Or use ``batch.py`` to
-   create multiple data sets.
-
-   .. code:: bash
-
-       python data_set.py
-       -e https://quiltdata.com
-       -u USERNAME
-       -n "ENCODE data"
-       -d "#A549 #histone peak data #hg19"
-       -f downloads/wgEncodeBroadHistoneNhaH3k36me3StdPk.broadPeak
-
-File formats in this example
-----------------------------
-
-- `ENCODE broadPeak format <https://genome.ucsc.edu/FAQ/FAQformat.html#format13>`_
-
-Resources
----------
-
-- `ENCODE Project <https://www.encodeproject.org/>`_
-
 Development
 -----------
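Patch 56 leaves README.rst focused on the library itself, and its context lines show the gene-math surface that survives. A closing sketch of those calls, with the connection setup and table lookup hedged as assumptions (only `intersect_wao` and `subtract` appear verbatim above):

```python
# The constructor arguments and get_table() lookup are assumptions;
# only the two gene-math calls are taken verbatim from README.rst.
from quilt import Connection

conn = Connection('USERNAME')           # hypothetical signature
tableA = conn.get_table(1234)           # hypothetical lookup by table id
tableB = conn.get_table(5678)

result = tableA.intersect_wao(tableB)   # 'wao' presumably mirrors bedtools' -wao
result = tableA.subtract(tableB)
```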