 from typing import Any
 from typing import cast

+import tomli
+import tomli_w
+
 from poetry.core.packages.dependency import Dependency
 from poetry.core.packages.package import Package
 from poetry.core.semver.helpers import parse_constraint
 from poetry.core.semver.version import Version
-from poetry.core.toml.file import TOMLFile
 from poetry.core.version.markers import parse_marker
 from poetry.core.version.requirements import InvalidRequirement
-from tomlkit import array
-from tomlkit import comment
-from tomlkit import document
-from tomlkit import inline_table
-from tomlkit import item
-from tomlkit import table
-from tomlkit.exceptions import TOMLKitError
-from tomlkit.items import Array


 if TYPE_CHECKING:
     from poetry.core.packages.directory_dependency import DirectoryDependency
     from poetry.core.packages.file_dependency import FileDependency
     from poetry.core.packages.url_dependency import URLDependency
     from poetry.core.packages.vcs_dependency import VCSDependency
-    from tomlkit.items import Table
-    from tomlkit.toml_document import TOMLDocument

     from poetry.repositories.lockfile_repository import LockfileRepository

@@ -54,17 +46,17 @@ class Locker:
     _relevant_keys = [*_legacy_keys, "group"]

     def __init__(self, lock: str | Path, local_config: dict[str, Any]) -> None:
-        self._lock = TOMLFile(lock)
+        self._lock = lock if isinstance(lock, Path) else Path(lock)
         self._local_config = local_config
-        self._lock_data: TOMLDocument | None = None
+        self._lock_data: dict[str, Any] | None = None
         self._content_hash = self._get_content_hash()

     @property
-    def lock(self) -> TOMLFile:
+    def lock(self) -> Path:
         return self._lock

     @property
-    def lock_data(self) -> TOMLDocument:
+    def lock_data(self) -> dict[str, Any]:
         if self._lock_data is None:
             self._lock_data = self._get_lock_data()

@@ -74,7 +66,7 @@ def is_locked(self) -> bool:
         """
         Checks whether the locker has been locked (lockfile found).
         """
-        if not self._lock.exists():
+        if not self.lock.exists():
             return False

         return "package" in self.lock_data
@@ -83,7 +75,8 @@ def is_fresh(self) -> bool:
         """
         Checks whether the lock file is still up to date with the current hash.
         """
-        lock = self._lock.read()
+        with self.lock.open("rb") as f:
+            lock = tomli.load(f)
         metadata = lock.get("metadata", {})

         if "content-hash" in metadata:
@@ -115,7 +108,7 @@ def locked_repository(self) -> LockfileRepository:
             source_type = source.get("type")
             url = source.get("url")
             if source_type in ["directory", "file"]:
-                url = self._lock.path.parent.joinpath(url).resolve().as_posix()
+                url = self.lock.parent.joinpath(url).resolve().as_posix()

             name = info["name"]
             package = Package(
@@ -199,7 +192,7 @@ def locked_repository(self) -> LockfileRepository:
                     package.marker = parse_marker(split_dep[1].strip())

             for dep_name, constraint in info.get("dependencies", {}).items():
-                root_dir = self._lock.path.parent
+                root_dir = self.lock.parent
                 if package.source_type == "directory":
                     # root dir should be the source of the package relative to the lock
                     # path
@@ -226,27 +219,21 @@ def locked_repository(self) -> LockfileRepository:
         return repository

     def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
-        files: dict[str, Any] = table()
+        files: dict[str, Any] = {}
         package_specs = self._lock_packages(packages)
         # Retrieving hashes
         for package in package_specs:
             if package["name"] not in files:
                 files[package["name"]] = []

             for f in package["files"]:
-                file_metadata = inline_table()
+                file_metadata = {}
                 for k, v in sorted(f.items()):
                     file_metadata[k] = v

                 files[package["name"]].append(file_metadata)

-            if files[package["name"]]:
-                package_files = item(files[package["name"]])
-                assert isinstance(package_files, Array)
-                files[package["name"]] = package_files.multiline(True)
-
-        lock = document()
-        lock.add(comment(GENERATED_COMMENT))
+        lock: dict[str, Any] = {}
         lock["package"] = package_specs

         if root.extras:
@@ -268,12 +255,10 @@ def set_lock_data(self, root: Package, packages: list[Package]) -> bool:

         return False

-    def _write_lock_data(self, data: TOMLDocument) -> None:
-        self.lock.write(data)
-
-        # Checking lock file data consistency
-        if data != self.lock.read():
-            raise RuntimeError("Inconsistent lock file data.")
+    def _write_lock_data(self, data: dict[str, Any]) -> None:
+        with self.lock.open("wb") as f:
+            f.write(f"# {GENERATED_COMMENT}\n\n".encode())
+            tomli_w.dump(data, f)

         self._lock_data = None

@@ -294,16 +279,17 @@ def _get_content_hash(self) -> str:

         return sha256(json.dumps(relevant_content, sort_keys=True).encode()).hexdigest()

-    def _get_lock_data(self) -> TOMLDocument:
-        if not self._lock.exists():
+    def _get_lock_data(self) -> dict[str, Any]:
+        if not self.lock.exists():
             raise RuntimeError("No lockfile found. Unable to read locked packages")

-        try:
-            lock_data: TOMLDocument = self._lock.read()
-        except TOMLKitError as e:
-            raise RuntimeError(f"Unable to read the lock file ({e}).")
+        with self.lock.open("rb") as f:
+            try:
+                lock_data = tomli.load(f)
+            except tomli.TOMLDecodeError as e:
+                raise RuntimeError(f"Unable to read the lock file ({e}).")

-        metadata = cast("Table", lock_data["metadata"])
+        metadata = lock_data["metadata"]
         lock_version = Version.parse(metadata.get("lock-version", "1.0"))
         current_version = Version.parse(self._VERSION)
         accepted_versions = parse_constraint(self._READ_VERSION_RANGE)
@@ -354,7 +340,7 @@ def _dump_package(self, package: Package) -> dict[str, Any]:
             if dependency.pretty_name not in dependencies:
                 dependencies[dependency.pretty_name] = []

-            constraint = inline_table()
+            constraint: dict[str, Any] = {}

             if dependency.is_directory():
                 dependency = cast("DirectoryDependency", dependency)
@@ -420,14 +406,10 @@ def _dump_package(self, package: Package) -> dict[str, Any]:
         }

         if dependencies:
-            data["dependencies"] = table()
-            for k, constraints in dependencies.items():
-                if len(constraints) == 1:
-                    data["dependencies"][k] = constraints[0]
-                else:
-                    data["dependencies"][k] = array().multiline(True)
-                    for constraint in constraints:
-                        data["dependencies"][k].append(constraint)
+            data["dependencies"] = {
+                name: constraints[0] if len(constraints) == 1 else constraints
+                for name, constraints in dependencies.items()
+            }

         if package.extras:
             extras = {}
@@ -443,7 +425,7 @@ def _dump_package(self, package: Package) -> dict[str, Any]:
                 url = Path(
                     os.path.relpath(
                         Path(url).resolve(),
-                        Path(self._lock.path.parent).resolve(),
+                        Path(self.lock.parent).resolve(),
                     )
                 ).as_posix()
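For context on the swap shown above: tomli only parses TOML and tomli_w only serializes plain Python dicts, so the generated-file comment must be written by hand before dumping, and any comments are dropped when the file is read back. Below is a minimal sketch of that round trip, not code from the patch; the GENERATED_COMMENT text, the lock path, and the sample data are placeholders.

from pathlib import Path

import tomli
import tomli_w

# Placeholder values for illustration only.
GENERATED_COMMENT = "This file is automatically generated by an example script."
lock_path = Path("example.lock")

data = {
    "package": [{"name": "example", "version": "1.0.0", "files": []}],
    "metadata": {"lock-version": "2.0", "content-hash": "0" * 64},
}

# Write: tomli_w cannot emit comments, so the header line is written manually
# before the TOML body, mirroring what the new _write_lock_data does.
with lock_path.open("wb") as f:
    f.write(f"# {GENERATED_COMMENT}\n\n".encode())
    tomli_w.dump(data, f)

# Read: tomli ignores the comment line, so the parsed data matches what was dumped.
with lock_path.open("rb") as f:
    assert tomli.load(f) == data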