
Commit

minor refinements
ypriverol committed Sep 25, 2024
1 parent 750938f commit 2805e22
Showing 6 changed files with 23 additions and 22 deletions.
2 changes: 1 addition & 1 deletion files/files.py
@@ -162,7 +162,7 @@ def copy_raw_files_from_dir(self, accession, source_base_directory):
"""

# get the full path where you can find the raw files in the file system
# to support that, data should be written in following format:
# to support that, data should be written in the following format:
# base/path/ + yyyy/mm/accession/ + submitted/
path_fragment = self.get_submitted_file_path_prefix(accession)
complete_source_dir = source_base_directory + "/" + path_fragment + "/submitted/"
5 changes: 2 additions & 3 deletions pridepy.py
@@ -14,7 +14,6 @@
 from project.project import Project
 from protein.protein import Protein
 from spectra.spectra import Spectra
-from util.api_handling import Util
 from util.file_handling import FileHanding


@@ -101,8 +100,8 @@ def update_metadata(filename, username, password):
     token = authentication.get_token(username, password)

     # Format extracted metadata to compatible with PRIDE API endpoint
-    fileHandling = FileHanding()
-    fileHandling.wrap_with_ms_run_metadata(filename)
+    file_handling = FileHanding()
+    file_handling.wrap_with_ms_run_metadata(filename)

     # Update msrun metatdata
     msrun = MsRun()
18 changes: 10 additions & 8 deletions project/project.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-import json

 from util.api_handling import Util

@@ -24,7 +23,8 @@ def get_projects(self, page_size, page, sort_direction, sort_conditions):
         :param sort_conditions: Field(s) for sorting the results on
         :return: paged peptide_evidences in json format
         """
-        request_url = self.API_BASE_URL + "projects?" + "pageSize=" + str(page_size) + "&page=" + str(page) + "&sortDirection=" + sort_direction + "&sortConditions=" + sort_conditions
+        request_url = self.API_BASE_URL + "projects?" + "pageSize=" + str(page_size) + "&page=" + str(
+            page) + "&sortDirection=" + sort_direction + "&sortConditions=" + sort_conditions
         headers = {"Accept": "application/JSON"}
         response = Util.get_api_call(request_url, headers)
         return response.json()
@@ -66,7 +66,8 @@ def get_files_by_accession(self, accession, query_filter, page_size, page, sort_
         if query_filter:
             request_url = request_url + "filter=" + query_filter + "&"

-        request_url = request_url + "pageSize=" + str(page_size) + "&page=" + str(page) + "&sortDirection=" + sort_direction + "&sortConditions=" + sort_conditions
+        request_url = request_url + "pageSize=" + str(page_size) + "&page=" + str(
+            page) + "&sortDirection=" + sort_direction + "&sortConditions=" + sort_conditions

         headers = {"Accept": "application/JSON"}
         response = Util.get_api_call(request_url, headers)
@@ -78,12 +79,12 @@ def get_private_files_by_accession(self, accession, user, passwd):
         request_url = self.PRIVATE_API_BASE_URL + "getAAPTokenWeb"
         headers = {"Content-Type": "application/json;charset=UTF-8"}

-        data = {"Credentials":{"username": user,"password": passwd}}
-        aapToken = Util.post_api_call(request_url, headers, data)
-        print(aapToken.text)
+        data = {"Credentials": {"username": user, "password": passwd}}
+        aap_token = Util.post_api_call(request_url, headers, data)
+        print(aap_token.text)

         request_url = self.PRIVATE_API_BASE_URL + "projects/" + accession + "/files"
-        headers = {"Authorization": "Bearer " + aapToken.text}
+        headers = {"Authorization": "Bearer " + aap_token.text}

         while True:
             response = Util.get_api_call(request_url, headers)
@@ -113,7 +114,8 @@ def get_similar_projects_by_accession(self, accession):
         response = Util.get_api_call(request_url, headers)
         return response.json()

-    def search_by_keywords_and_filters(self, keyword, query_filter, page_size, page, date_gap, sort_direction, sort_fields):
+    def search_by_keywords_and_filters(self, keyword, query_filter, page_size, page, date_gap, sort_direction,
+                                       sort_fields):
         """
         search PRIDE API projects by keyword and filters
         :param keyword: keyword to search projects
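The two request_url hunks above only re-wrap long string concatenations across lines; the query itself is unchanged. As a purely illustrative sketch (not part of this commit, and not how pridepy builds its URLs), the same kind of query string could be assembled with urllib.parse.urlencode; the base URL and parameter values below are assumed for the example:

    from urllib.parse import urlencode

    # Assumed placeholder values, for illustration only.
    api_base_url = "https://www.ebi.ac.uk/pride/ws/archive/v2/"
    params = {
        "pageSize": 100,
        "page": 0,
        "sortDirection": "DESC",
        "sortConditions": "submission_date",
    }
    # urlencode() converts values to strings, percent-escapes them and joins with "&".
    request_url = api_base_url + "projects?" + urlencode(params)
    print(request_url)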
15 changes: 8 additions & 7 deletions requirements.txt
@@ -1,7 +1,8 @@
-requests
-ratelimit
-click
-pytest
-setuptools
-plotly
-boto3
+requests~=2.31.0
+ratelimit~=2.2.1
+click~=8.1.7
+pytest~=8.0.2
+setuptools~=58.0.4
+plotly~=5.22.0
+boto3~=1.34.61
+botocore~=1.34.74
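The ~= operator added above is pip's compatible-release specifier: requests~=2.31.0, for example, accepts any 2.31.x release but rejects 2.32.0, so these pins allow patch updates while blocking minor-version jumps. A minimal check of that behaviour, assuming the third-party packaging library is installed:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet("~=2.31.0")   # equivalent to >=2.31.0, <2.32.0
    print(Version("2.31.5") in spec)  # True:  patch releases satisfy the pin
    print(Version("2.32.0") in spec)  # False: the next minor release does not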
4 changes: 2 additions & 2 deletions spectra/spectra.py
@@ -32,8 +32,8 @@ def spectra_evidences(self, usi, project_accession, assay_accession, peptide_seq
         request_url = self.api_base_url + "spectra?"

         if usi:
-            usiArray = usi.split("\\n")
-            for usiElement in usiArray:
+            usi_array = usi.split("\\n")
+            for usiElement in usi_array:
                 request_url = request_url + "usi=" + usiElement + "&"

         if project_accession:
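The rename above is purely cosmetic, but note that usi.split("\\n") splits on the two-character sequence backslash + "n" (i.e. a "\n" written out as two characters in the input), not on a real newline character. A small demonstration with a made-up pair of USIs (not taken from this repository):

    # Raw string: the separator is a literal backslash followed by 'n'.
    usi = r"mzspec:PXD000001:run1:scan:1\nmzspec:PXD000001:run2:scan:2"
    print(usi.split("\\n"))  # two USIs: the literal backslash-n separator is matched
    print(usi.split("\n"))   # one element: the string contains no real newline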
1 change: 0 additions & 1 deletion tests/test_search.py
@@ -4,7 +4,6 @@
 from peptide.peptide import Peptide
 from project.project import Project
 from protein.protein import Protein
-from spectra.spectra import Spectra


 class TestSearch(TestCase):
