8 changes: 8 additions & 0 deletions tests/unit/api/test_simple.py
@@ -277,6 +277,7 @@ def test_with_files_no_serial(self, db_request, content_type, renderer_override)
"yanked": False,
"size": f.size,
"upload-time": f.upload_time.isoformat() + "Z",
"data-dist-info-metadata": False,
}
for f in files
],
@@ -323,6 +324,7 @@ def test_with_files_with_serial(self, db_request, content_type, renderer_override)
"yanked": False,
"size": f.size,
"upload-time": f.upload_time.isoformat() + "Z",
"data-dist-info-metadata": False,
}
for f in files
],
@@ -370,6 +372,7 @@ def test_with_files_with_version_multi_digit(
release=r,
filename=f"{project.name}-{r.version}.whl",
packagetype="bdist_wheel",
metadata_file_sha256_digest="deadbeefdeadbeefdeadbeefdeadbeef",
)
for r in releases
]
@@ -405,6 +408,11 @@ def test_with_files_with_version_multi_digit(
"yanked": False,
"size": f.size,
"upload-time": f.upload_time.isoformat() + "Z",
"data-dist-info-metadata": {
"sha256": "deadbeefdeadbeefdeadbeefdeadbeef"
}
if f.metadata_file_sha256_digest is not None
else False,
}
for f in files
],
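The three hunks above all assert the same PEP 658 surface in the JSON simple index: each file entry carries a "data-dist-info-metadata" key that is False when no extracted metadata digest is stored, and a hash dictionary when one is. A minimal sketch of that mapping, assuming a file object with the digest attribute used in this diff (the helper name is illustrative, not part of the PR):

def dist_info_metadata_value(file):
    # PEP 658: False when no separate metadata file exists, otherwise
    # a {hash_name: hex_digest} mapping for the .metadata document.
    if file.metadata_file_sha256_digest is None:
        return False
    return {"sha256": file.metadata_file_sha256_digest}
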
144 changes: 122 additions & 22 deletions tests/unit/forklift/test_legacy.py
@@ -72,19 +72,26 @@ def _get_tar_testdata(compression_type=""):
return temp_f.getvalue()


def _get_whl_testdata(name="fake_package", version="1.0"):
temp_f = io.BytesIO()
with zipfile.ZipFile(file=temp_f, mode="w") as zfp:
zfp.writestr(f"{name}-{version}.dist-info/METADATA", "Fake metadata")
return temp_f.getvalue()


def _storage_hash(data):
return hashlib.blake2b(data, digest_size=256 // 8).hexdigest()


_TAR_GZ_PKG_TESTDATA = _get_tar_testdata("gz")
_TAR_GZ_PKG_MD5 = hashlib.md5(_TAR_GZ_PKG_TESTDATA).hexdigest()
_TAR_GZ_PKG_SHA256 = hashlib.sha256(_TAR_GZ_PKG_TESTDATA).hexdigest()
_TAR_GZ_PKG_STORAGE_HASH = hashlib.blake2b(
_TAR_GZ_PKG_TESTDATA, digest_size=256 // 8
).hexdigest()
_TAR_GZ_PKG_STORAGE_HASH = _storage_hash(_TAR_GZ_PKG_TESTDATA)

_TAR_BZ2_PKG_TESTDATA = _get_tar_testdata("bz2")
_TAR_BZ2_PKG_MD5 = hashlib.md5(_TAR_BZ2_PKG_TESTDATA).hexdigest()
_TAR_BZ2_PKG_SHA256 = hashlib.sha256(_TAR_BZ2_PKG_TESTDATA).hexdigest()
_TAR_BZ2_PKG_STORAGE_HASH = hashlib.blake2b(
_TAR_BZ2_PKG_TESTDATA, digest_size=256 // 8
).hexdigest()
_TAR_BZ2_PKG_STORAGE_HASH = _storage_hash(_TAR_BZ2_PKG_TESTDATA)


class TestExcWithMessage:
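This hunk factors the repeated blake2b construction into _storage_hash and adds _get_whl_testdata, which builds an in-memory wheel whose only member is a dist-info METADATA file. The digest (digest_size=256 // 8 gives 32 bytes, i.e. 64 hex characters) drives the sharded storage layout asserted later in this file. A sketch of that layout, inferred from the hash slices in the store assertions below (not a verbatim warehouse helper):

import hashlib

def storage_path(data, filename):
    # blake2b-256 of the file body, sharded as 2/2/60 hex characters.
    digest = hashlib.blake2b(data, digest_size=256 // 8).hexdigest()
    return "/".join([digest[:2], digest[2:4], digest[4:], filename])
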
@@ -2771,6 +2778,8 @@ def test_upload_succeeds_with_wheel(
RoleFactory.create(user=user, project=project)

filename = f"{project.name}-{release.version}-cp34-none-{plat}.whl"
filebody = _get_whl_testdata(name=project.name, version=release.version)
filestoragehash = _storage_hash(filebody)

pyramid_config.testing_securitypolicy(identity=user)
db_request.user = user
@@ -2782,19 +2791,22 @@
"version": release.version,
"filetype": "bdist_wheel",
"pyversion": "cp34",
"md5_digest": _TAR_GZ_PKG_MD5,
"md5_digest": hashlib.md5(filebody).hexdigest(),
"content": pretend.stub(
filename=filename,
file=io.BytesIO(_TAR_GZ_PKG_TESTDATA),
type="application/tar",
file=io.BytesIO(filebody),
type="application/zip",
),
}
)

@pretend.call_recorder
def storage_service_store(path, file_path, *, meta):
with open(file_path, "rb") as fp:
assert fp.read() == _TAR_GZ_PKG_TESTDATA
if file_path.endswith(".metadata"):
assert fp.read() == b"Fake metadata"
else:
assert fp.read() == filebody

storage_service = pretend.stub(store=storage_service_store)

@@ -2818,9 +2830,9 @@ def storage_service_store(path, file_path, *, meta):
pretend.call(
"/".join(
[
_TAR_GZ_PKG_STORAGE_HASH[:2],
_TAR_GZ_PKG_STORAGE_HASH[2:4],
_TAR_GZ_PKG_STORAGE_HASH[4:],
filestoragehash[:2],
filestoragehash[2:4],
filestoragehash[4:],
filename,
]
),
@@ -2831,7 +2843,24 @@ def storage_service_store(path, file_path, *, meta):
"package-type": "bdist_wheel",
"python-version": "cp34",
},
)
),
pretend.call(
"/".join(
[
filestoragehash[:2],
filestoragehash[2:4],
filestoragehash[4:],
filename + ".metadata",
]
),
mock.ANY,
meta={
"project": project.normalized_name,
"version": release.version,
"package-type": "bdist_wheel",
"python-version": "cp34",
},
),
]

# Ensure that a File object has been created.
@@ -2884,6 +2913,8 @@ def test_upload_succeeds_with_wheel_after_sdist(
RoleFactory.create(user=user, project=project)

filename = f"{project.name}-{release.version}-cp34-none-any.whl"
filebody = _get_whl_testdata(name=project.name, version=release.version)
filestoragehash = _storage_hash(filebody)

pyramid_config.testing_securitypolicy(identity=user)
db_request.user = user
@@ -2895,19 +2926,22 @@
"version": release.version,
"filetype": "bdist_wheel",
"pyversion": "cp34",
"md5_digest": "335c476dc930b959dda9ec82bd65ef19",
"md5_digest": hashlib.md5(filebody).hexdigest(),
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
type="application/tar",
file=io.BytesIO(filebody),
type="application/zip",
),
}
)

@pretend.call_recorder
def storage_service_store(path, file_path, *, meta):
with open(file_path, "rb") as fp:
assert fp.read() == b"A fake file."
if file_path.endswith(".metadata"):
assert fp.read() == b"Fake metadata"
else:
assert fp.read() == filebody

storage_service = pretend.stub(store=storage_service_store)
db_request.find_service = pretend.call_recorder(
@@ -2930,9 +2964,9 @@ def storage_service_store(path, file_path, *, meta):
pretend.call(
"/".join(
[
"4e",
"6e",
"fa4c0ee2bbad071b4f5b5ea68f1aea89fa716e7754eb13e2314d45a5916e",
filestoragehash[:2],
filestoragehash[2:4],
filestoragehash[4:],
filename,
]
),
@@ -2943,7 +2977,24 @@ def storage_service_store(path, file_path, *, meta):
"package-type": "bdist_wheel",
"python-version": "cp34",
},
)
),
pretend.call(
"/".join(
[
filestoragehash[:2],
filestoragehash[2:4],
filestoragehash[4:],
filename + ".metadata",
]
),
mock.ANY,
meta={
"project": project.normalized_name,
"version": release.version,
"package-type": "bdist_wheel",
"python-version": "cp34",
},
),
]

# Ensure that a File object has been created.
@@ -3025,6 +3076,55 @@ def test_upload_fails_with_unsupported_wheel_plat(
"400 Binary wheel .* has an unsupported platform tag .*", resp.status
)

def test_upload_fails_with_missing_metadata_wheel(
self, monkeypatch, pyramid_config, db_request
):
user = UserFactory.create()
pyramid_config.testing_securitypolicy(identity=user)
db_request.user = user
EmailFactory.create(user=user)
project = ProjectFactory.create()
release = ReleaseFactory.create(project=project, version="1.0")
RoleFactory.create(user=user, project=project)

temp_f = io.BytesIO()
with zipfile.ZipFile(file=temp_f, mode="w") as zfp:
zfp.writestr(
f"{project.name.lower()}-{release.version}.dist-info/METADATA",
"Fake metadata",
)

filename = f"{project.name}-{release.version}-cp34-none-any.whl"
filebody = temp_f.getvalue()

db_request.POST = MultiDict(
{
"metadata_version": "1.2",
"name": project.name,
"version": release.version,
"filetype": "bdist_wheel",
"pyversion": "cp34",
"md5_digest": hashlib.md5(filebody).hexdigest(),
"content": pretend.stub(
filename=filename,
file=io.BytesIO(filebody),
type="application/zip",
),
}
)

monkeypatch.setattr(legacy, "_is_valid_dist_file", lambda *a, **kw: True)

with pytest.raises(HTTPBadRequest) as excinfo:
legacy.file_upload(db_request)

resp = excinfo.value

assert resp.status_code == 400
assert re.match(
"400 Wheel .* does not contain the required METADATA file: .*", resp.status
)

def test_upload_updates_existing_project_name(
self, pyramid_config, db_request, metrics
):
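The new failure test builds a wheel by hand, writing METADATA under a lowercased project name, presumably so the member fails to match the expected {name}-{version}.dist-info/METADATA path, and then asserts the upload is rejected with a 400. A simplified sketch of the extraction step the success and failure tests both exercise (helper name and exception type are illustrative, not the exact warehouse code):

import zipfile

def read_wheel_metadata(wheel_file, name, version):
    # PEP 658: the index serves METADATA separately, so the uploaded
    # wheel must actually contain it.
    member = f"{name}-{version}.dist-info/METADATA"
    with zipfile.ZipFile(wheel_file) as zfp:
        try:
            return zfp.read(member)
        except KeyError:
            raise ValueError(
                f"Wheel does not contain the required METADATA file: {member}"
            )
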
5 changes: 3 additions & 2 deletions tests/unit/packaging/test_models.py
@@ -551,6 +551,7 @@ def test_compute_paths(self, db_session):

assert rfile.path == expected
assert rfile.pgp_path == expected + ".asc"
assert rfile.metadata_path == expected + ".metadata"

def test_query_paths(self, db_session):
project = DBProjectFactory.create()
@@ -571,10 +572,10 @@
)

results = (
db_session.query(File.path, File.pgp_path)
db_session.query(File.path, File.pgp_path, File.metadata_path)
.filter(File.id == rfile.id)
.limit(1)
.one()
)

assert results == (expected, expected + ".asc")
assert results == (expected, expected + ".asc", expected + ".metadata")
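Both path tests now cover a third derived path: the metadata document lives next to the distribution under the same storage key plus a .metadata suffix, mirroring the existing .asc convention for signatures. A minimal sketch of how such derived paths can be declared, assuming SQLAlchemy hybrid properties (the real warehouse model differs in detail):

from sqlalchemy.ext.hybrid import hybrid_property

class File:  # sketch only; the real model has many more columns
    path: str

    @hybrid_property
    def pgp_path(self):
        return self.path + ".asc"

    @hybrid_property
    def metadata_path(self):
        return self.path + ".metadata"

Because hybrid properties also act as class-level expressions, the query in test_query_paths can select File.metadata_path directly.
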
60 changes: 59 additions & 1 deletion tests/unit/packaging/test_tasks.py
@@ -84,7 +84,65 @@ def mock_named_temporary_file():
assert primary_stub.get_metadata.calls == [pretend.call(file.path)]
assert primary_stub.get.calls == [pretend.call(file.path)]
assert archive_stub.store.calls == [
pretend.call(file.path, "/tmp/wutang", meta={"fizz": "buzz"})
pretend.call(file.path, "/tmp/wutang", meta={"fizz": "buzz"}),
]
else:
assert primary_stub.get_metadata.calls == []
assert primary_stub.get.calls == []
assert archive_stub.store.calls == []


@pytest.mark.parametrize("archived", [True, False])
def test_sync_file_to_archive_includes_bonus_files(db_request, monkeypatch, archived):
file = FileFactory(
archived=archived,
has_signature=True,
metadata_file_sha256_digest="deadbeefdeadbeefdeadbeefdeadbeef",
)
primary_stub = pretend.stub(
get_metadata=pretend.call_recorder(lambda path: {"fizz": "buzz"}),
get=pretend.call_recorder(
lambda path: pretend.stub(read=lambda: b"my content")
),
)
archive_stub = pretend.stub(
store=pretend.call_recorder(lambda filename, path, meta=None: None)
)
db_request.find_service = pretend.call_recorder(
lambda iface, name=None: {"primary": primary_stub, "archive": archive_stub}[
name
]
)

@contextmanager
def mock_named_temporary_file():
yield pretend.stub(
name="/tmp/wutang",
write=lambda bites: None,
flush=lambda: None,
)

monkeypatch.setattr(tempfile, "NamedTemporaryFile", mock_named_temporary_file)

sync_file_to_archive(db_request, file.id)

assert file.archived

if not archived:
assert primary_stub.get_metadata.calls == [
pretend.call(file.path),
pretend.call(file.metadata_path),
pretend.call(file.pgp_path),
]
assert primary_stub.get.calls == [
pretend.call(file.path),
pretend.call(file.metadata_path),
pretend.call(file.pgp_path),
]
assert archive_stub.store.calls == [
pretend.call(file.path, "/tmp/wutang", meta={"fizz": "buzz"}),
pretend.call(file.metadata_path, "/tmp/wutang", meta={"fizz": "buzz"}),
pretend.call(file.pgp_path, "/tmp/wutang", meta={"fizz": "buzz"}),
]
else:
assert primary_stub.get_metadata.calls == []
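The new parametrized test pins down the archive fan-out: for an unarchived file that has both a signature and a metadata digest, the task must copy three objects (the distribution, the .metadata document, and the .asc signature), each with its primary-storage metadata preserved. A condensed sketch of that loop, reconstructed from the recorded calls (the service interface name and exact control flow are assumptions):

import tempfile

def sync_file_to_archive(request, file):
    # IFileStorage: assumed storage interface name.
    primary = request.find_service(IFileStorage, name="primary")
    archive = request.find_service(IFileStorage, name="archive")
    if not file.archived:
        paths = [file.path]
        if file.metadata_file_sha256_digest is not None:
            paths.append(file.metadata_path)
        if file.has_signature:
            paths.append(file.pgp_path)
        for path in paths:
            meta = primary.get_metadata(path)
            with tempfile.NamedTemporaryFile() as tmp:
                tmp.write(primary.get(path).read())
                tmp.flush()
                archive.store(path, tmp.name, meta=meta)
        file.archived = True
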
4 changes: 3 additions & 1 deletion tests/unit/packaging/test_utils.py
@@ -26,7 +26,9 @@ def test_render_simple_detail(db_request, monkeypatch, jinja):
release1 = ReleaseFactory.create(project=project, version="1.0")
release2 = ReleaseFactory.create(project=project, version="dog")
FileFactory.create(release=release1)
FileFactory.create(release=release2)
FileFactory.create(
release=release2, metadata_file_sha256_digest="beefdeadbeefdeadbeefdeadbeefdead"
)

fake_hasher = pretend.stub(
update=pretend.call_recorder(lambda x: None),
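Giving one of the two files a metadata digest lets the renderer test cover both branches of the simple detail page, where PEP 658 is surfaced as a data-dist-info-metadata attribute on the file's anchor tag. A hedged sketch of how the template context might expose that attribute value (the helper and attribute names on file are assumptions):

def dist_info_metadata_attr(file):
    # Rendered into the anchor tag, per PEP 658, as:
    #   data-dist-info-metadata="sha256=<hex digest>"
    if file.metadata_file_sha256_digest is None:
        return None
    return f"sha256={file.metadata_file_sha256_digest}"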