Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ before_script:
- "echo 'backend: Agg' > matplotlibrc"

# command to run tests
script: export OMP_NUM_THREADS=1 && pytest projectq --cov projectq
script: export OMP_NUM_THREADS=1 && pytest projectq --cov projectq -p no:warnings

after_success:
- coveralls
2 changes: 1 addition & 1 deletion projectq/_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,4 @@
# limitations under the License.

"""Define version number here and read it from setup.py automatically"""
__version__ = "0.5.1"
__version__ = "0.5.2"
37 changes: 14 additions & 23 deletions projectq/backends/_ibm/_ibm_http_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ def _authenticate(self, token=None):
def _run(self, info, device):
"""
Run the quantum code on the IBMQ machine.
Update since March2020: only protocol available is what they call
Update since September 2020: only protocol available is what they call
'object storage' where a job request via the POST method gets in
return a url link to which send the json data. A final http validates
the data communication.
Expand Down Expand Up @@ -162,19 +162,10 @@ def _run(self, info, device):
**json_step1)
request.raise_for_status()
r_json = request.json()
download_endpoint_url = r_json['objectStorageInfo'][
'downloadQObjectUrlEndpoint']
upload_endpoint_url = r_json['objectStorageInfo'][
'uploadQobjectUrlEndpoint']
upload_url = r_json['objectStorageInfo']['uploadUrl']
execution_id = r_json['id']

# STEP2: WE USE THE ENDPOINT TO GET THE UPLOAD LINK
json_step2 = {'allow_redirects': True, 'timeout': (5.0, None)}
request = super(IBMQ, self).get(upload_endpoint_url, **json_step2)
request.raise_for_status()
r_json = request.json()

# STEP3: WE USE THE ENDPOINT TO GET THE UPLOAD LINK
# STEP2: WE UPLOAD THE CIRCUIT DATA
n_classical_reg = info['nq']
# hack: easier to restrict labels to measured qubits
n_qubits = n_classical_reg # self.backends[device]['nq']
Expand All @@ -194,7 +185,7 @@ def _run(self, info, device):
data += ('"parameter_binds": [], "memory_slots": '
+ str(n_classical_reg))
data += (', "n_qubits": ' + str(n_qubits)
+ '}, "schema_version": "1.1.0", ')
+ '}, "schema_version": "1.2.0", ')
data += '"type": "QASM", "experiments": [{"config": '
data += '{"n_qubits": ' + str(n_qubits) + ', '
data += '"memory_slots": ' + str(n_classical_reg) + '}, '
Expand All @@ -205,31 +196,31 @@ def _run(self, info, device):
data += '"clbit_labels": ' + str(c_label).replace('\'', '\"') + ', '
data += '"memory_slots": ' + str(n_classical_reg) + ', '
data += '"creg_sizes": [["c", ' + str(n_classical_reg) + ']], '
data += ('"name": "circuit0"}, "instructions": ' + instruction_str
data += ('"name": "circuit0", "global_phase": 0}, "instructions": ' + instruction_str
+ '}]}')

json_step3 = {
json_step2 = {
'data': data,
'params': {
'access_token': None
},
'timeout': (5.0, None)
}
request = super(IBMQ, self).put(r_json['url'], **json_step3)
request = super(IBMQ, self).put(upload_url, **json_step2)
request.raise_for_status()

# STEP4: CONFIRM UPLOAD
json_step4 = {
# STEP3: CONFIRM UPLOAD
json_step3 = {
'data': None,
'json': None,
'timeout': (self.timeout, None)
}
upload_data_url = upload_endpoint_url.replace('jobUploadUrl',
'jobDataUploaded')
request = super(IBMQ, self).post(upload_data_url, **json_step4)

upload_data_url = urljoin(_API_URL,
'Network/ibm-q/Groups/open/Projects/main/Jobs/'+str(execution_id)
+'/jobDataUploaded')
request = super(IBMQ, self).post(upload_data_url, **json_step3)
request.raise_for_status()
r_json = request.json()
execution_id = upload_endpoint_url.split('/')[-2]

return execution_id

Expand Down
30 changes: 16 additions & 14 deletions projectq/backends/_ibm/_ibm_http_client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def raise_for_status(self):
# Accessing status of device. Return online.
status_url = 'Network/ibm-q/Groups/open/Projects/main/devices/v/1'
if (args[1] == urljoin(_API_URL, status_url)
and (request_num[0] == 1 or request_num[0] == 7)):
and (request_num[0] == 1 or request_num[0] == 6)):
request_num[0] += 1
connections = set([(0, 1), (1, 0), (1, 2), (1, 3), (1, 4), (2, 1),
(2, 3), (2, 4), (3, 1), (3, 4), (4, 3)])
Expand All @@ -97,15 +97,15 @@ def raise_for_status(self):
_API_URL,
"Network/ibm-q/Groups/open/Projects/main/Jobs/{execution_id}".
format(execution_id=execution_id)) and not result_ready[0]
and request_num[0] == 6):
and request_num[0] == 5):
result_ready[0] = True
request_num[0] += 1
return MockResponse({"status": "RUNNING"}, 200)
elif (args[1] == urljoin(
_API_URL,
"Network/ibm-q/Groups/open/Projects/main/Jobs/{execution_id}".
format(execution_id=execution_id)) and result_ready[0]
and request_num[0] == 8):
and request_num[0] == 7):
request_num[0] += 1
return MockResponse(
{"status": "COMPLETED"}, 200)
Expand All @@ -114,13 +114,13 @@ def raise_for_status(self):
_API_URL,
"Network/ibm-q/Groups/open/Projects/main/Jobs/{execution_id}/resultDownloadUrl".
format(execution_id=execution_id))
and request_num[0] == 9):
and request_num[0] == 8):
request_num[0] += 1
return MockResponse(
{"url": "result_download_url"}, 200)
#STEP7
elif (args[1] == "result_download_url"
and request_num[0] == 10):
and request_num[0] == 9):
request_num[0] += 1
return MockResponse(
{"results": [result]}, 200)
Expand Down Expand Up @@ -156,13 +156,14 @@ def raise_for_status(self):
answer1={'objectStorageInfo':{
'downloadQObjectUrlEndpoint':'url_dld_endpoint',
'uploadQobjectUrlEndpoint':'/'+execution_id+'/jobUploadUrl',
'uploadUrl':'url_upld'}
'uploadUrl':'url_upld'},
'id': execution_id
}
return MockPostResponse(answer1,200)

# STEP4
elif (args[1] == "/"+execution_id+"/jobDataUploaded"
and request_num[0] == 5):
elif (args[1] == urljoin(_API_URL, jobs_url + "/"+execution_id+"/jobDataUploaded")
and request_num[0] == 4):
request_num[0] += 1
return MockPostResponse({}, 200)

Expand All @@ -171,7 +172,7 @@ def raise_for_status(self):
_API_URL,
"Network/ibm-q/Groups/open/Projects/main/Jobs/{execution_id}/resultDownloaded".
format(execution_id=execution_id))
and request_num[0] == 11):
and request_num[0] == 10):
request_num[0] += 1
return MockPostResponse(
{}, 200)
Expand All @@ -195,8 +196,8 @@ def raise_for_status(self):
pass

# STEP3
if (args[1] == "s3_url"
and request_num[0] == 4):
if (args[1] == "url_upld"
and request_num[0] == 3):
request_num[0] += 1
return MockResponse({}, 200)

Expand Down Expand Up @@ -579,12 +580,13 @@ def raise_for_status(self):
answer1={'objectStorageInfo':{
'downloadQObjectUrlEndpoint':'url_dld_endpoint',
'uploadQobjectUrlEndpoint':'/'+execution_id+'/jobUploadUrl',
'uploadUrl':'url_upld'}
'uploadUrl':'url_upld'},
'id': execution_id,
}
return MockPostResponse(answer1,200)

# STEP4
elif (args[1] == "/"+execution_id+"/jobDataUploaded"):
elif (args[1] == urljoin(_API_URL, jobs_url + "/"+execution_id+"/jobDataUploaded")):
return MockPostResponse({}, 200)

def mocked_requests_put(*args, **kwargs):
Expand All @@ -606,7 +608,7 @@ def raise_for_status(self):
pass

# STEP3
if (args[1] == "s3_url"):
if (args[1] == "url_upld"):
return MockResponse({}, 200)

monkeypatch.setattr("requests.sessions.Session.get", mocked_requests_get)
Expand Down
7 changes: 3 additions & 4 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -370,10 +370,9 @@ def _configure_cxx_standard(self):
cxx_standards = [year for year in cxx_standards if year < 17]

if sys.platform == 'darwin':
_, minor_version, _ = [
int(i) for i in platform.mac_ver()[0].split('.')
]
if minor_version < 14:
major_version = int(platform.mac_ver()[0].split('.')[0])
minor_version = int(platform.mac_ver()[0].split('.')[1])
if major_version <= 10 and minor_version < 14:
cxx_standards = [year for year in cxx_standards if year < 17]

for year in cxx_standards:
Expand Down