restructure analysis

This commit is contained in:
2025-12-09 21:05:07 +01:00
parent beddfee087
commit 514570062c
3 changed files with 413 additions and 211 deletions

View File

@@ -8,19 +8,36 @@ app = marimo.App(width="medium")
def _():
import marimo as mo
import pandas as pd
return mo, pd
from pathlib import Path
TAGUETTE_EXPORT_DIR = Path('./data/transcripts/taguette_results')
WORKING_DIR = Path('./data/processing/02_taguette_postprocess')
if not WORKING_DIR.exists():
WORKING_DIR.mkdir(parents=True)
if not TAGUETTE_EXPORT_DIR.exists():
TAGUETTE_EXPORT_DIR.mkdir(parents=True)
return TAGUETTE_EXPORT_DIR, mo, pd
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Step 1: Export All Highlights
def _(TAGUETTE_EXPORT_DIR, mo):
mo.md(rf"""
# Step 1: Export All Highlights out of Taguette
1. Go to: http://taguette.tail44fa00.ts.net/project/1
2. Select 'Highlights' on left
3. Select 'See all highlights'
4. Top right 'Export this view' > 'CSV'
5.
5. Save to '{TAGUETTE_EXPORT_DIR}/all_tags.csv'
""")
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Step 2: Import here for processing
""")
return
@@ -36,7 +53,34 @@ def _(pd):
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
### Post-process the dataframe so it can be easily analyzed
# Step 3: Process each 'Interview'
""")
return
@app.cell
def _(all_tags_df, mo):
file_dropdown = mo.ui.dropdown(
options=all_tags_df['document'].unique().tolist(),
label="Select Interview to Process",
full_width=True
)
file_dropdown
return (file_dropdown,)
@app.cell
def _(all_tags_df, file_dropdown):
# filter all_tags_df to only the document = file_dropdown.value
df = all_tags_df.loc[all_tags_df['document'] == file_dropdown.value].copy()
return (df,)
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
### Add `_context` column to track which Voice / Character is being referred to per highlight
Create a new column '_context', which is defined by the last '_V-' or '_C-' tag seen in the 'tags' column, when moving row by row from top to bottom.
1. Iterates through the dataframe in document order (row by row)
@@ -51,27 +95,23 @@ def _(mo):
| id | document | tag | content | _seq_id | _context |
|-----|-------------|------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|
| 252 | P2 - Done | _C-Counselor | So we've pulled through your top personality, which was the counselor, and then we've included those same twelve voices from before. And your task now is to select which of the voices you feel best suits this character that would be, the personality and voice for Chase's digital assistant. | 115 | _C-Counselor |
| 88 | P2 - Done | VT - Knowledgeable / Trust | They I feel like they're like twins in that sense. Like, they both had this calming, like, calming voice that was smooth. It felt, like, but articulated and helpful, and, like, I felt reassured listening to them. | 116 | _V-54, _V-41 |
| 88 | P2 - Done | _V-54 | They I feel like they're like twins in that sense. Like, they both had this calming, like, calming voice that was smooth. It felt, like, but articulated and helpful, and, like, I felt reassured listening to them. | 117 | _V-54, _V-41 |
| 88 | P2 - Done | _V-41 | They I feel like they're like twins in that sense. Like, they both had this calming, like, calming voice that was smooth. It felt, like, but articulated and helpful, and, like, I felt reassured listening to them. | 118 | _V-54, _V-41 |
| 88 | P2 - Done | VT - Human / Artificial | They I feel like they're like twins in that sense. Like, they both had this calming, like, calming voice that was smooth. It felt, like, but articulated and helpful, and, like, I felt reassured listening to them. | 119 | _V-54, _V-41 |
| 88 | P2 - Done | VT - Friendliness / Empathy | They I feel like they're like twins in that sense. Like, they both had this calming, like, calming voice that was smooth. It felt, like, but articulated and helpful, and, like, I felt reassured listening to them. | 120 | _V-54, _V-41 |
| 90 | P2 - Done | VT - Personal 'click' | I picked the female because her voice is so unique. | 121 | _V-41 |
| 90 | P2 - Done | _V-41 | I picked the female because her voice is so unique. | 122 | _V-41 |
""")
return
@app.cell
def _(all_tags_df):
def _(df):
# First pass: identify context tags within each highlight group
all_tags_df['_context'] = None
df['_context'] = None
last_context = None
processed_ids = set()
# Process in document order
for idx, row in all_tags_df.iterrows():
for idx, row in df.iterrows():
highlight_id = row['id']
# If we haven't processed this highlight yet
@@ -79,7 +119,7 @@ def _(all_tags_df):
processed_ids.add(highlight_id)
# Get all rows for this highlight
highlight_rows = all_tags_df[all_tags_df['id'] == highlight_id]
highlight_rows = df[df['id'] == highlight_id]
# Collect all context tags in this highlight
context_tags = []
@@ -97,17 +137,144 @@ def _(all_tags_df):
context_tag = last_context
# Assign the context to all rows in this highlight
all_tags_df.loc[all_tags_df['id'] == highlight_id, '_context'] = context_tag
df.loc[df['id'] == highlight_id, '_context'] = context_tag
del idx
all_tags_df
df
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Step 2: Sentiment Analysis
## Resolve multi-context rows (only VT- and CT- theme tags)
For rows that have multiple contexts (e.g., both _V-54 and _V-41)
- split these into separate rows for each context.
- Then mark these for 'manual_analysis'
""")
return
@app.cell
def _(df, pd):
# Expand rows that contain multiple contexts (comma-separated)
expanded_rows = []
for _, _row in df.iterrows():
context_value = _row['_context']
has_multiple = pd.notna(context_value) and ',' in str(context_value)
if has_multiple:
contexts = [c.strip() for c in str(context_value).split(',')]
else:
contexts = [context_value]
if has_multiple:
for ctx in contexts:
new_row = _row.copy()
new_row['_context'] = ctx
new_row['manual_analysis'] = True
if str(new_row['tag']).startswith(('VT -', 'CT -')):
new_row['sentiment'] = None
expanded_rows.append(new_row)
else:
new_row = _row.copy()
new_row['_context'] = contexts[0]
new_row['manual_analysis'] = False
expanded_rows.append(new_row)
expanded_df_raw = pd.DataFrame(expanded_rows).reset_index(drop=True)
manual_rows = expanded_df_raw[expanded_df_raw['manual_analysis']]
if not manual_rows.empty:
print(
f"⚠️ {len(manual_rows)} rows were created from multi-context splits. "
"See next cell for manual review."
)
else:
print("✓ No multi-context rows found")
return (expanded_df_raw,)
@app.cell
def _(expanded_df_raw, mo):
# Filter for rows that need review. Manual analysis and the tag starts with 'VT -' or 'CT -'
rows_to_edit = expanded_df_raw[
(expanded_df_raw['manual_analysis'])
& (expanded_df_raw['tag'].str.startswith(('VT -', 'CT -'), na=False))
]
# Create data editor for split rows
split_rows_editor = mo.ui.data_editor(
rows_to_edit
).form(label="Update Sentiment / Manual Flag")
return rows_to_edit, split_rows_editor
@app.cell(hide_code=True)
def _(mo, rows_to_edit, split_rows_editor):
mo.vstack([
mo.md(f"""
### ⚠️ Manual Review Required
**{len(rows_to_edit)} rows** were split from multi-context entries.
Please review them below:
1. Update the `sentiment` column (-1, 0, 1) for each row based on the specific context.
2. Uncheck `manual_analysis` when you are done reviewing a row.
3. Click **Submit** to apply changes.
"""),
split_rows_editor
])
return
@app.cell
def _(expanded_df_raw, mo, pd, split_rows_editor):
# Reconstruct the full dataframe using the editor's current value
# This will update whenever the user edits the table
mo.stop(split_rows_editor.value is None, mo.md("Submit your changes."))
_edited_rows = split_rows_editor.value
_static_rows = expanded_df_raw[~expanded_df_raw['manual_analysis']]
expanded_df2 = pd.concat([_static_rows, _edited_rows]).sort_index()
return (expanded_df2,)
@app.cell
def _(expanded_df2, pd):
# Verify no rows have multiple contexts
try:
has_comma = expanded_df2['_context'].apply(lambda x: ',' in str(x) if pd.notna(x) else False)
assert not has_comma.any(), "Some rows still have multiple contexts (comma-separated)"
# Verify that rows still marked for manual analysis have sentiment values
manual_sent_rows = expanded_df2[expanded_df2['manual_analysis']]
theme_rows = manual_sent_rows[manual_sent_rows['tag'].str.startswith(('VT -', 'CT -'), na=False)]
missing_sentiment = theme_rows[theme_rows['sentiment'].isna()]
assert missing_sentiment.empty, (
f"{len(missing_sentiment)} rows marked for manual analysis "
"have missing sentiment values"
)
print("\n✓ Verification passed: Manual-analysis rows are consistent")
expanded_df_final = expanded_df2
expanded_df_final
except AssertionError as e:
print(f"\n❌ Verification failed: {e}")
print("Please review the data before proceeding")
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Highlight Sentiment Analysis
For each row in the dataframe, analyze the sentiment of the 'content' regarding the respective tag. This should be done for all 'VT -' and 'CT -' tags, since these represent the 'VoiceThemes' and 'CharacterThemes' respectively. The results should be stored in a new 'sentiment' column.
@@ -120,7 +287,7 @@ def _(mo):
@app.cell
def _(all_tags_df):
def _(df):
# TODO: Implement sentiment analysis and add 'sentiment' column
# for now, create an empty sentiment column with randomized dummy values for testing
@@ -132,181 +299,32 @@ def _(all_tags_df):
return random.choice([-1, 0, 1]) # Random sentiment for testing
return None
all_tags_df['sentiment'] = all_tags_df.apply(lambda row: dummy_sentiment_analysis(row['content'], row['tag']), axis=1)
df['sentiment'] = df.apply(lambda row: dummy_sentiment_analysis(row['content'], row['tag']), axis=1)
all_tags_df
df
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
## Step 2b: Resolve multi-context rows
# Step 3: Process 'Other' tags
For rows that have multiple contexts (e.g., both _V-54 and _V-41), split these into separate rows for each context, removing the content and sentiment analysis for each new row. Then mark these for manual review. Use marimo's interactive notebook editing features to facilitate this process.
This ensures that each row corresponds to a single context for clearer analysis in subsequent steps. Add verification column to mark these rows for review. Run assert at the end to ensure no rows have multiple contexts and if that passes, drop the verification column.
These need to be reviewed manually for interesting content
""")
return
@app.cell
def _(all_tags_df, pd):
# Identify rows with multiple contexts (comma-separated)
all_tags_df['_needs_split'] = all_tags_df['_context'].apply(
lambda x: ',' in str(x) if pd.notna(x) else False
)
def _(mo):
mo.md(r"""
# Create expanded rows for multi-context entries
expanded_rows = []
for _, _row in all_tags_df.iterrows():
if _row['_needs_split']:
# Split the context by comma
contexts = [c.strip() for c in str(_row['_context']).split(',')]
# Create a new row for each context
for ctx in contexts:
new_row = _row.copy()
new_row['_context'] = ctx
new_row['_was_split'] = True # Mark for manual review
expanded_rows.append(new_row)
else:
# Keep single-context rows as-is
new_row = _row.copy()
new_row['_was_split'] = False
expanded_rows.append(new_row)
# Create the new dataframe
expanded_df2 = pd.DataFrame(expanded_rows).reset_index(drop=True)
# Display rows that were split for review
split_rows = expanded_df2[expanded_df2['_was_split']]
if not split_rows.empty:
split_rows
# print(f"⚠️ {len(split_rows)} rows were created from multi-context splits")
# print("These are marked with '_was_split' = True for manual review\n")
# print("Sample of split rows:")
# split_rows[['id', 'document', 'tag', '_context', 'sentiment', '_was_split']]
else:
print("✓ No multi-context rows found")
expanded_df2[expanded_df2['_was_split']]
return (expanded_df2,)
""")
return
@app.cell
def _():
# Using marimo's interactive notebook editing features, have the user manually update the sentiment values for the split rows as needed. (only for 'VT -' and 'CT -' tags)
return
@app.cell
def _(expanded_df2, pd):
# Verify no rows have multiple contexts
try:
has_comma = expanded_df2['_context'].apply(lambda x: ',' in str(x) if pd.notna(x) else False)
assert not has_comma.any(), "Some rows still have multiple contexts (comma-separated)"
# assert that all have manual checks have been completed
assert expanded_df2['_was_split'].sum() == 0, "Some rows still need manual review"
print("\n✓ Verification passed: All rows have single contexts")
# Drop verification columns since verification passed
expanded_df_final = expanded_df2.drop(columns=['_needs_split', '_was_split'])
print("✓ Verification columns dropped")
expanded_df_final
except AssertionError as e:
print(f"\n❌ Verification failed: {e}")
print("Please review the data before proceeding")
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Step 3: Create Matrices for each interview
For each interview (document), create a matrix where:
- Rows represent the different Voices/Characters (based on '_V-' and '_C-' tags)
- Columns represent the different VoiceThemes/CharacterThemes (based on 'VT -' and 'CT -' tags)
- Each cell contains the aggregated sentiment score for that Voice/Character regarding that combination
""")
return
@app.cell
def _(all_tags_df, pd):
import numpy as np
def create_sentiment_matrix(df, document_name):
"""
Create a sentiment matrix for a specific document.
Parameters:
- df: DataFrame with columns ['document', 'tag', '_context', 'sentiment']
- document_name: Name of the document to filter by
Returns:
- DataFrame representing the sentiment matrix
"""
# Filter for the specific document
doc_df = df[df['document'] == document_name].copy()
# Filter for rows that have sentiment values (VT- and CT- tags)
sentiment_rows = doc_df[doc_df['sentiment'].notna()].copy()
if sentiment_rows.empty:
print(f"No sentiment data found for document: {document_name}")
return pd.DataFrame()
# Filter for rows with valid Voice/Character context
valid_rows = sentiment_rows[
sentiment_rows['_context'].notna() &
(sentiment_rows['_context'].str.contains('_V-|_C-', na=False))
].copy()
if valid_rows.empty:
print(f"No Voice/Character context found for document: {document_name}")
return pd.DataFrame()
# Create aggregation: group by Voice/Character (_context) and Theme (tag)
# Sum sentiment scores for each combination
matrix_data = valid_rows.groupby(['_context', 'tag'])['sentiment'].sum().reset_index()
# Pivot to create the matrix
matrix = matrix_data.pivot(index='_context', columns='tag', values='sentiment')
# Fill NaN with 0 (no sentiment data for that combination)
matrix = matrix.fillna(0)
# Convert to integers for cleaner display
matrix = matrix.astype(int)
return matrix
# Create matrices for each unique document
documents = all_tags_df['document'].unique()
matrices = {}
for doc in documents:
print(f"\n{'='*60}")
print(f"Document: {doc}")
print('='*60)
matrix = create_sentiment_matrix(all_tags_df, doc)
if not matrix.empty:
matrices[doc] = matrix
print(matrix)
else:
print("No matrix data available")
# Store matrices in a variable for further analysis
matrices
return

180
03_Sentiment_Analysis.py Normal file
View File

@@ -0,0 +1,180 @@
import marimo
__generated_with = "0.18.3"
app = marimo.App(width="medium")
@app.cell
def _():
    """Set up imports and the input/working directories for this notebook."""
    import marimo as mo
    import pandas as pd
    from pathlib import Path

    # Taguette CSV exports are read from here; intermediate results are written
    # to the step-specific working directory.
    TAGUETTE_EXPORT_DIR = Path('./data/transcripts/taguette_results')
    WORKING_DIR = Path('./data/processing/03_sentiment_analysis')

    # exist_ok=True makes creation idempotent across notebook reruns and avoids
    # the check-then-create race of the previous `if not ...exists()` guard.
    WORKING_DIR.mkdir(parents=True, exist_ok=True)
    TAGUETTE_EXPORT_DIR.mkdir(parents=True, exist_ok=True)
    return WORKING_DIR, mo, pd
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Phase 1: Individual interview analysis
- Create sentiment matrices for each interview (document)
- Save the intermediate results to file in the `WORKING_DIR`
""")
return
@app.cell
def _(pd):
    def create_sentiment_matrix(df, document_name, column_prefix='VT - |CT - ', row_prefix='_V-|_C-'):
        """
        Create a sentiment matrix for a specific document.

        Parameters:
        - df: DataFrame with columns ['document', 'tag', '_context', 'sentiment']
        - document_name: Name of the document to filter by
        - column_prefix: regex matched against 'tag' to select theme rows
          (default keeps both VoiceTheme 'VT - ' and CharacterTheme 'CT - ' tags)
        - row_prefix: regex matched against '_context' to select Voice/Character
          rows (default keeps both '_V-' and '_C-' contexts)

        Returns:
        - DataFrame: index = contexts, columns = tags, cells = summed sentiment
          (int); empty DataFrame when no matching rows exist
        """
        # Restrict to the requested interview document.
        doc_df = df[df['document'] == document_name].copy()

        # Keep only rows whose tag matches the theme prefixes.
        # NOTE: str.contains treats the prefix string as a regex, so '|' acts as
        # alternation; prefixes must remain regex-safe.
        sentiment_rows = doc_df[
            doc_df['tag'].str.contains(column_prefix, na=False)
        ].copy()
        if sentiment_rows.empty:
            print(f"No sentiment data found for document: {document_name}")
            return pd.DataFrame()

        # Keep only rows whose context identifies a Voice/Character sample.
        valid_rows = sentiment_rows[
            sentiment_rows['_context'].notna() &
            (sentiment_rows['_context'].str.contains(row_prefix, na=False))
        ].copy()
        if valid_rows.empty:
            print(f"No Voice/Character context found for document: {document_name}")
            return pd.DataFrame()

        # Sum sentiment per (context, tag) pair, then pivot into a matrix.
        matrix_data = valid_rows.groupby(['_context', 'tag'])['sentiment'].sum().reset_index()
        matrix = matrix_data.pivot(index='_context', columns='tag', values='sentiment')

        # Combinations with no data carry no sentiment: fill with 0 and use
        # ints for cleaner display (sentiments are -1/0/1, so sums are integral).
        matrix = matrix.fillna(0)
        matrix = matrix.astype(int)
        return matrix
    return (create_sentiment_matrix,)
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
## Step 1.1: Voice Sample vs. Theme Sentiment Matrix
For each interview (document), create a matrix where:
- Rows represent the different Voices (based on '_V-' tags)
- Columns represent the different VoiceThemes (based on 'VT -' tags)
- Each cell contains the aggregated sentiment score (sum) for that Voice/Theme combination
""")
return
@app.cell
def _(WORKING_DIR, all_tags_df, create_sentiment_matrix, mo):
    # Build one Voice x VoiceTheme sentiment matrix per interview document and
    # persist each non-empty matrix to WORKING_DIR as CSV.
    documents = all_tags_df['document'].unique()
    matrices = {}
    for doc in documents:
        print(f"\n{'='*60}")
        print(f"Document: {doc}")
        print('='*60)
        matrix = create_sentiment_matrix(all_tags_df, doc, column_prefix='VT - ', row_prefix='_V-')
        if not matrix.empty:
            matrices[doc] = matrix
            print(matrix)
            # Short timestamp in the filename so successive runs can be tracked
            # without overwriting earlier exports.
            # FIX: marimo exposes no public `mo.utils.get_timestamp`; use stdlib
            # datetime instead.
            from datetime import datetime
            timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
            filename = WORKING_DIR / f"{doc.replace(' ', '_')}_voice_theme_matrix_{timestamp}.csv"
            matrix.to_csv(filename)
            print(f"Matrix saved to: {filename}")
        else:
            # Nothing to store or save for this document.
            print("No matrix data available")
    # Display the collected matrices as the cell output.
    matrices
    return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
## Step 1.2: Character Sample vs. Theme Sentiment Matrix
For each interview (document), create a matrix where:
- Rows represent the different Characters (based on '_C-' tags)
- Columns represent the different CharacterThemes (based on 'CT -' tags)
- Each cell contains the aggregated sentiment score (sum) for that Character/Theme combination
""")
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
## Step 1.3: Chase Brand Sentiment
TODO: not sure we have enough supporting data for this yet
""")
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
## Step 1.x: Save Matrices to Files
Save the matrices to CSV files in the WORKING_DIR for intermediate storage. Include a short timestamp in the filename so we can track runs.
""")
return
@app.cell
def _():
# Save the matrices to CSV files in the WORKING_DIR for intermediate storage. Include a short timestamp in the filename so we can track runs.
return
@app.cell(hide_code=True)
def _(mo):
mo.md(r"""
# Phase 2: Overall Results
Aggregate results of all the interviews into master matrices.
""")
return
# Allow running this marimo notebook directly as a plain Python script.
if __name__ == "__main__":
    app.run()

74
uv.lock generated
View File

@@ -546,7 +546,7 @@ wheels = [
[[package]]
name = "marimo"
version = "0.18.0"
version = "0.18.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -555,7 +555,7 @@ dependencies = [
{ name = "jedi" },
{ name = "loro", marker = "python_full_version < '3.14'" },
{ name = "markdown" },
{ name = "msgspec-m" },
{ name = "msgspec" },
{ name = "narwhals" },
{ name = "packaging" },
{ name = "psutil" },
@@ -567,9 +567,9 @@ dependencies = [
{ name = "uvicorn" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/69/91/7648bc680f6c583bc93bcc0034f835609d3f4ef89082f52d5022388b1a46/marimo-0.18.0.tar.gz", hash = "sha256:7a6ccd943cf817c56e8e35b7daeb67240b398d27f2f8a0647ef62c7b7e57ef27", size = 33493100, upload-time = "2025-11-20T20:51:22.898Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/87/7e01a9b61a34ce00e40b5fd1a3458ddbc648d5d9f34d7fdbc1cc19d942ee/marimo-0.18.3.tar.gz", hash = "sha256:588f227dfa04f65149dc866a62b345496fa99ecfc5a59981069faaa953adffc9", size = 37847525, upload-time = "2025-12-05T22:46:10.301Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/e5/414212416fbd0769014c6a6660bfeaaecca69baf5ecef3125204ffacc8bc/marimo-0.18.0-py3-none-any.whl", hash = "sha256:3cd46b889294edf9af57dbf9d4239b4c262ac8b26ca131278d910b991af78f1e", size = 34007479, upload-time = "2025-11-20T20:51:27.116Z" },
{ url = "https://files.pythonhosted.org/packages/98/bb/ab48a0d75de8c060d787a1f03c1b26d9339047c9c093de5ff7f35acea3dd/marimo-0.18.3-py3-none-any.whl", hash = "sha256:f460779189fbc6b0b1f63c735036f5d8269456d90a398157963f216fd5fd4e89", size = 38368675, upload-time = "2025-12-05T22:46:06.18Z" },
]
[[package]]
@@ -645,39 +645,43 @@ wheels = [
]
[[package]]
name = "msgspec-m"
version = "0.19.3"
name = "msgspec"
version = "0.20.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3b/f4/74f6ae9084690280a59b6dc6de52189b21303c505a188ef435a7eafe371e/msgspec_m-0.19.3.tar.gz", hash = "sha256:074d56f17de25e6c0f4184ecff9c163de0f612a9956260df4342f3f51d959c41", size = 319640, upload-time = "2025-11-14T21:08:03.798Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/9c/bfbd12955a49180cbd234c5d29ec6f74fe641698f0cd9df154a854fc8a15/msgspec-0.20.0.tar.gz", hash = "sha256:692349e588fde322875f8d3025ac01689fead5901e7fb18d6870a44519d62a29", size = 317862, upload-time = "2025-11-24T03:56:28.934Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/75/ae/e13adb80e3d6639e1b80048f38de477abb8db8c77e1ee3b5b23acc8ade62/msgspec_m-0.19.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:811145e8db3939e429da2ad4830ec34a1e411b538246d74ff3a3893022581abc", size = 219448, upload-time = "2025-11-14T21:07:18.252Z" },
{ url = "https://files.pythonhosted.org/packages/f0/20/70a2256a3823c807e52fa6a790f5506eb0e0403eb4af8e0db5583fe7ebeb/msgspec_m-0.19.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cff8e7f9e77383975e842b6c12413728716629b86ab5013df06fa5fc75fef3c6", size = 226219, upload-time = "2025-11-14T21:07:20.519Z" },
{ url = "https://files.pythonhosted.org/packages/97/cf/dfd632aff5d92bf96a769e8161d44f1a1abb76f13b5aae01eded709f829d/msgspec_m-0.19.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78327d975db6219b668a763e0cae5d233a40420e5e99c0d682c0801dae9b572a", size = 214686, upload-time = "2025-11-14T21:07:21.607Z" },
{ url = "https://files.pythonhosted.org/packages/ba/cb/048a96efd70be8ec8a310cfb4fa0b934e14d672b4a40145ab7d75d13be5b/msgspec_m-0.19.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5d2a75ae25770aff3e962c960a9d5cded31d40d515ed2ae8d13bf0053a0fb782", size = 221849, upload-time = "2025-11-14T21:07:24.023Z" },
{ url = "https://files.pythonhosted.org/packages/5f/bb/36ef65159d106dd6b294b300dfa66dafbc407585e6ee9afe9cb684ffe959/msgspec_m-0.19.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d65b906742858ef327f0be155adaf0276e904129aa350794e596eef944cc6a7", size = 217106, upload-time = "2025-11-14T21:07:25.138Z" },
{ url = "https://files.pythonhosted.org/packages/54/d4/f715aae677cf9f4771e903744260b07f103e2b7cf6c544bc1f006c61dd2c/msgspec_m-0.19.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:310393f144e05c380cf5466f988bee19b922a735877f8f6635e506ec03da5cce", size = 225426, upload-time = "2025-11-14T21:07:26.345Z" },
{ url = "https://files.pythonhosted.org/packages/b7/08/f2d143ca3c25ad7907e043408d4efd46af60b4942b59ddc7b351f3623455/msgspec_m-0.19.3-cp312-cp312-win_amd64.whl", hash = "sha256:2173c441951d3a55269b612826e64272c4bb1e3f18085165b6eb43ce15fa5c29", size = 188161, upload-time = "2025-11-14T21:07:27.525Z" },
{ url = "https://files.pythonhosted.org/packages/c0/a3/a2d08e39ad2aa48d448cfdcad7161cd32cdd00d1a9cc1c98a49819468755/msgspec_m-0.19.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87bccfbecd0943ac12d4b51181dd8cc53bbcd68440b12624d4c5ed349c7213fe", size = 219549, upload-time = "2025-11-14T21:07:28.617Z" },
{ url = "https://files.pythonhosted.org/packages/66/01/83a968ecc7474db9112eb0b52ba71281bf2a164b1de7e56ab7a2bc5da6dc/msgspec_m-0.19.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:900edb76d8f3f63bd0dae3be211cf71d9a9ff9c10bc538718c23dc99dae39f20", size = 226236, upload-time = "2025-11-14T21:07:29.775Z" },
{ url = "https://files.pythonhosted.org/packages/37/49/1679085328698406c147832390b76f82799c3586df82fb01c0a40fdc6501/msgspec_m-0.19.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57839618ea750037ccb95523bfaa4f8a12b2230de8685d306f0a09b3f583dc69", size = 214680, upload-time = "2025-11-14T21:07:30.967Z" },
{ url = "https://files.pythonhosted.org/packages/eb/e9/19927c79400c98ccb3be7418382d43b2c575ce88b904fc74ab69f71af852/msgspec_m-0.19.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c0286b736d8b3dff224dca88c084e8d08dae59cf821e0ef771e382e95847f22", size = 221978, upload-time = "2025-11-14T21:07:32.111Z" },
{ url = "https://files.pythonhosted.org/packages/31/d5/f76914c1b831c7621e7f0d53fa6d8140c0e674c715d1a584df0b3263d00f/msgspec_m-0.19.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7b263ca3dd507707a7a2bc82c09746d0dd5a5596e9cdb70ee140ee3eb651084f", size = 217129, upload-time = "2025-11-14T21:07:33.157Z" },
{ url = "https://files.pythonhosted.org/packages/0f/7e/31c42a50d6dab3dc6983fd2fbdb4fb6cdf61e04a6083f6a274d9bef7bc8a/msgspec_m-0.19.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a9f3a4d009aa40de6c19b588f631832be1a2b24a2f8ddd9bd36d890ec5a42740", size = 225536, upload-time = "2025-11-14T21:07:34.288Z" },
{ url = "https://files.pythonhosted.org/packages/d5/38/90468da9a3af38a72d7bc4751ec62a1c812cdeb391b1f70d280c93561d1a/msgspec_m-0.19.3-cp313-cp313-win_amd64.whl", hash = "sha256:97ee5d0006ced20bb02be38aaa67ba34968f324e80ca2de2f501051f52add0fa", size = 188057, upload-time = "2025-11-14T21:07:35.434Z" },
{ url = "https://files.pythonhosted.org/packages/8c/8e/df8788b514499712d0af7e69309782952e51e188fe80b192f4e93261c8bb/msgspec_m-0.19.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:775e3d605a02204f6496cba94c95c29cb015829bdce0c09b17ee59d81465a008", size = 219652, upload-time = "2025-11-14T21:07:36.964Z" },
{ url = "https://files.pythonhosted.org/packages/59/6f/44466fad5d0e0238f2f9c0e2fdb5babfb8372b9e3a8216bc9d87d03ba3bd/msgspec_m-0.19.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6439f840cd671076bbc28aa2812642aa80f35dde6895bbcc0582e67a43c411c8", size = 225795, upload-time = "2025-11-14T21:07:38.419Z" },
{ url = "https://files.pythonhosted.org/packages/ee/77/9d22fa4ac8c3bb7aba2f0f8283eae481dff32ff022f79d428538a063f989/msgspec_m-0.19.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1c0d96afe7963213d23d39f43aa6c7a5ba8a395ed586ac38b1c356bddc18572", size = 214223, upload-time = "2025-11-14T21:07:39.968Z" },
{ url = "https://files.pythonhosted.org/packages/07/da/cdfb19f0718d6baefb669b299e4a4baea6f88412c362203784c7f28b1906/msgspec_m-0.19.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0a59c864d4062ebfb88bcbb6117f48695518be8c9d57fb883fb4f736e325cf6d", size = 221428, upload-time = "2025-11-14T21:07:41.185Z" },
{ url = "https://files.pythonhosted.org/packages/67/b2/644dfb8c56e04caf5509e08c394b19f3e4b1cf6f3de2245d51a975243245/msgspec_m-0.19.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b66484e6427c00e4c18b86ebf8dcf8948f7c7d9e9a8ffed0b79c9bae222a7370", size = 216765, upload-time = "2025-11-14T21:07:42.752Z" },
{ url = "https://files.pythonhosted.org/packages/67/33/1a01022f2324384a984990a560f48df4c030b20ad343a8b75c5fb1fba03e/msgspec_m-0.19.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:54b674576c74b886f5553d14e059f4f2ce0afef2193f217ae95d7ecb3c2468eb", size = 224549, upload-time = "2025-11-14T21:07:43.934Z" },
{ url = "https://files.pythonhosted.org/packages/46/ac/54ce237335c3288b96d2e00d38448db295bac1307aa115ba86fdf976963e/msgspec_m-0.19.3-cp314-cp314-win_amd64.whl", hash = "sha256:257ec1679ccad3f799bebcc06aece2d16cb864487ffe60008de938310024acc2", size = 192109, upload-time = "2025-11-14T21:07:45.057Z" },
{ url = "https://files.pythonhosted.org/packages/be/a5/2b815e42c397ee7ebddb713459348971e320bf4a5bf76138fddde7938aa7/msgspec_m-0.19.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:e2a42a0e14a78459d63ca9d084446c65026688294dbf57444469a0923d8bbcc9", size = 218260, upload-time = "2025-11-14T21:07:46.141Z" },
{ url = "https://files.pythonhosted.org/packages/a8/95/50142e4129d5479bd04d9fc3c7a540afac62f536a578c9fedd45446a68c0/msgspec_m-0.19.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3162adfd97d017cd8804c5f339184a169ba8d4f4a5ddec84def52b2828fa5bc7", size = 224162, upload-time = "2025-11-14T21:07:47.337Z" },
{ url = "https://files.pythonhosted.org/packages/df/17/db9e5358d60f28fa7c9c330fe7c34c360bc9c186de671d757cd495ddb64d/msgspec_m-0.19.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:247c8075ea22d1d22fbff4259c2b3c12d41029728147b70804f591997efe0a88", size = 213067, upload-time = "2025-11-14T21:07:49.346Z" },
{ url = "https://files.pythonhosted.org/packages/18/a1/1f26838070450369ccc0bc0f94bc97b920cdab4ba3451a0a6e92bf1f8543/msgspec_m-0.19.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7138397e6edcbe60bf51deeda0e49713b6eeab9397f17a3d91e9b436b35b0c1", size = 220767, upload-time = "2025-11-14T21:07:50.459Z" },
{ url = "https://files.pythonhosted.org/packages/4b/09/a59f16d32868f04beffa9771296c75f266e6320fa82c2a63968baa59bf43/msgspec_m-0.19.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b032f7c4102266a134954060b4e8d9e5329444ea0eb98befc97ed602ab00cc7", size = 214854, upload-time = "2025-11-14T21:07:51.62Z" },
{ url = "https://files.pythonhosted.org/packages/8a/0b/37d660fb997f5a5889cf96c68996431859059e283fa4bac2f02cd3e9b321/msgspec_m-0.19.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:618b4de4ad867fa3701c2d7fb0f8961397ccc61e053115ddf8605e3795376af6", size = 223432, upload-time = "2025-11-14T21:07:52.701Z" },
{ url = "https://files.pythonhosted.org/packages/52/96/ba7fed5297556f6cba1199d21c3e2e26ece78c36548985d82ca1ecf7f87b/msgspec_m-0.19.3-cp314-cp314t-win_amd64.whl", hash = "sha256:41891410a28b66d28ff89e00cfbd68f80bc54c2c61ba3393ec01662125561f18", size = 204288, upload-time = "2025-11-14T21:07:54.198Z" },
{ url = "https://files.pythonhosted.org/packages/d9/6f/1e25eee957e58e3afb2a44b94fa95e06cebc4c236193ed0de3012fff1e19/msgspec-0.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2aba22e2e302e9231e85edc24f27ba1f524d43c223ef5765bd8624c7df9ec0a5", size = 196391, upload-time = "2025-11-24T03:55:32.677Z" },
{ url = "https://files.pythonhosted.org/packages/7f/ee/af51d090ada641d4b264992a486435ba3ef5b5634bc27e6eb002f71cef7d/msgspec-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:716284f898ab2547fedd72a93bb940375de9fbfe77538f05779632dc34afdfde", size = 188644, upload-time = "2025-11-24T03:55:33.934Z" },
{ url = "https://files.pythonhosted.org/packages/49/d6/9709ee093b7742362c2934bfb1bbe791a1e09bed3ea5d8a18ce552fbfd73/msgspec-0.20.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:558ed73315efa51b1538fa8f1d3b22c8c5ff6d9a2a62eff87d25829b94fc5054", size = 218852, upload-time = "2025-11-24T03:55:35.575Z" },
{ url = "https://files.pythonhosted.org/packages/5c/a2/488517a43ccf5a4b6b6eca6dd4ede0bd82b043d1539dd6bb908a19f8efd3/msgspec-0.20.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:509ac1362a1d53aa66798c9b9fd76872d7faa30fcf89b2fba3bcbfd559d56eb0", size = 224937, upload-time = "2025-11-24T03:55:36.859Z" },
{ url = "https://files.pythonhosted.org/packages/d5/e8/49b832808aa23b85d4f090d1d2e48a4e3834871415031ed7c5fe48723156/msgspec-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1353c2c93423602e7dea1aa4c92f3391fdfc25ff40e0bacf81d34dbc68adb870", size = 222858, upload-time = "2025-11-24T03:55:38.187Z" },
{ url = "https://files.pythonhosted.org/packages/9f/56/1dc2fa53685dca9c3f243a6cbecd34e856858354e455b77f47ebd76cf5bf/msgspec-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cb33b5eb5adb3c33d749684471c6a165468395d7aa02d8867c15103b81e1da3e", size = 227248, upload-time = "2025-11-24T03:55:39.496Z" },
{ url = "https://files.pythonhosted.org/packages/5a/51/aba940212c23b32eedce752896205912c2668472ed5b205fc33da28a6509/msgspec-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:fb1d934e435dd3a2b8cf4bbf47a8757100b4a1cfdc2afdf227541199885cdacb", size = 190024, upload-time = "2025-11-24T03:55:40.829Z" },
{ url = "https://files.pythonhosted.org/packages/41/ad/3b9f259d94f183daa9764fef33fdc7010f7ecffc29af977044fa47440a83/msgspec-0.20.0-cp312-cp312-win_arm64.whl", hash = "sha256:00648b1e19cf01b2be45444ba9dc961bd4c056ffb15706651e64e5d6ec6197b7", size = 175390, upload-time = "2025-11-24T03:55:42.05Z" },
{ url = "https://files.pythonhosted.org/packages/8a/d1/b902d38b6e5ba3bdddbec469bba388d647f960aeed7b5b3623a8debe8a76/msgspec-0.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c1ff8db03be7598b50dd4b4a478d6fe93faae3bd54f4f17aa004d0e46c14c46", size = 196463, upload-time = "2025-11-24T03:55:43.405Z" },
{ url = "https://files.pythonhosted.org/packages/57/b6/eff0305961a1d9447ec2b02f8c73c8946f22564d302a504185b730c9a761/msgspec-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f6532369ece217fd37c5ebcfd7e981f2615628c21121b7b2df9d3adcf2fd69b8", size = 188650, upload-time = "2025-11-24T03:55:44.761Z" },
{ url = "https://files.pythonhosted.org/packages/99/93/f2ec1ae1de51d3fdee998a1ede6b2c089453a2ee82b5c1b361ed9095064a/msgspec-0.20.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9a1697da2f85a751ac3cc6a97fceb8e937fc670947183fb2268edaf4016d1ee", size = 218834, upload-time = "2025-11-24T03:55:46.441Z" },
{ url = "https://files.pythonhosted.org/packages/28/83/36557b04cfdc317ed8a525c4993b23e43a8fbcddaddd78619112ca07138c/msgspec-0.20.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7fac7e9c92eddcd24c19d9e5f6249760941485dff97802461ae7c995a2450111", size = 224917, upload-time = "2025-11-24T03:55:48.06Z" },
{ url = "https://files.pythonhosted.org/packages/8f/56/362037a1ed5be0b88aced59272442c4b40065c659700f4b195a7f4d0ac88/msgspec-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f953a66f2a3eb8d5ea64768445e2bb301d97609db052628c3e1bcb7d87192a9f", size = 222821, upload-time = "2025-11-24T03:55:49.388Z" },
{ url = "https://files.pythonhosted.org/packages/92/75/fa2370ec341cedf663731ab7042e177b3742645c5dd4f64dc96bd9f18a6b/msgspec-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:247af0313ae64a066d3aea7ba98840f6681ccbf5c90ba9c7d17f3e39dbba679c", size = 227227, upload-time = "2025-11-24T03:55:51.125Z" },
{ url = "https://files.pythonhosted.org/packages/f1/25/5e8080fe0117f799b1b68008dc29a65862077296b92550632de015128579/msgspec-0.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:67d5e4dfad52832017018d30a462604c80561aa62a9d548fc2bd4e430b66a352", size = 189966, upload-time = "2025-11-24T03:55:52.458Z" },
{ url = "https://files.pythonhosted.org/packages/79/b6/63363422153937d40e1cb349c5081338401f8529a5a4e216865decd981bf/msgspec-0.20.0-cp313-cp313-win_arm64.whl", hash = "sha256:91a52578226708b63a9a13de287b1ec3ed1123e4a088b198143860c087770458", size = 175378, upload-time = "2025-11-24T03:55:53.721Z" },
{ url = "https://files.pythonhosted.org/packages/bb/18/62dc13ab0260c7d741dda8dc7f481495b93ac9168cd887dda5929880eef8/msgspec-0.20.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:eead16538db1b3f7ec6e3ed1f6f7c5dec67e90f76e76b610e1ffb5671815633a", size = 196407, upload-time = "2025-11-24T03:55:55.001Z" },
{ url = "https://files.pythonhosted.org/packages/dd/1d/b9949e4ad6953e9f9a142c7997b2f7390c81e03e93570c7c33caf65d27e1/msgspec-0.20.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:703c3bb47bf47801627fb1438f106adbfa2998fe586696d1324586a375fca238", size = 188889, upload-time = "2025-11-24T03:55:56.311Z" },
{ url = "https://files.pythonhosted.org/packages/1e/19/f8bb2dc0f1bfe46cc7d2b6b61c5e9b5a46c62298e8f4d03bbe499c926180/msgspec-0.20.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6cdb227dc585fb109305cee0fd304c2896f02af93ecf50a9c84ee54ee67dbb42", size = 219691, upload-time = "2025-11-24T03:55:57.908Z" },
{ url = "https://files.pythonhosted.org/packages/b8/8e/6b17e43f6eb9369d9858ee32c97959fcd515628a1df376af96c11606cf70/msgspec-0.20.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27d35044dd8818ac1bd0fedb2feb4fbdff4e3508dd7c5d14316a12a2d96a0de0", size = 224918, upload-time = "2025-11-24T03:55:59.322Z" },
{ url = "https://files.pythonhosted.org/packages/1c/db/0e833a177db1a4484797adba7f429d4242585980b90882cc38709e1b62df/msgspec-0.20.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b4296393a29ee42dd25947981c65506fd4ad39beaf816f614146fa0c5a6c91ae", size = 223436, upload-time = "2025-11-24T03:56:00.716Z" },
{ url = "https://files.pythonhosted.org/packages/c3/30/d2ee787f4c918fd2b123441d49a7707ae9015e0e8e1ab51aa7967a97b90e/msgspec-0.20.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:205fbdadd0d8d861d71c8f3399fe1a82a2caf4467bc8ff9a626df34c12176980", size = 227190, upload-time = "2025-11-24T03:56:02.371Z" },
{ url = "https://files.pythonhosted.org/packages/ff/37/9c4b58ff11d890d788e700b827db2366f4d11b3313bf136780da7017278b/msgspec-0.20.0-cp314-cp314-win_amd64.whl", hash = "sha256:7dfebc94fe7d3feec6bc6c9df4f7e9eccc1160bb5b811fbf3e3a56899e398a6b", size = 193950, upload-time = "2025-11-24T03:56:03.668Z" },
{ url = "https://files.pythonhosted.org/packages/e9/4e/cab707bf2fa57408e2934e5197fc3560079db34a1e3cd2675ff2e47e07de/msgspec-0.20.0-cp314-cp314-win_arm64.whl", hash = "sha256:2ad6ae36e4a602b24b4bf4eaf8ab5a441fec03e1f1b5931beca8ebda68f53fc0", size = 179018, upload-time = "2025-11-24T03:56:05.038Z" },
{ url = "https://files.pythonhosted.org/packages/4c/06/3da3fc9aaa55618a8f43eb9052453cfe01f82930bca3af8cea63a89f3a11/msgspec-0.20.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f84703e0e6ef025663dd1de828ca028774797b8155e070e795c548f76dde65d5", size = 200389, upload-time = "2025-11-24T03:56:06.375Z" },
{ url = "https://files.pythonhosted.org/packages/83/3b/cc4270a5ceab40dfe1d1745856951b0a24fd16ac8539a66ed3004a60c91e/msgspec-0.20.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7c83fc24dd09cf1275934ff300e3951b3adc5573f0657a643515cc16c7dee131", size = 193198, upload-time = "2025-11-24T03:56:07.742Z" },
{ url = "https://files.pythonhosted.org/packages/cd/ae/4c7905ac53830c8e3c06fdd60e3cdcfedc0bbc993872d1549b84ea21a1bd/msgspec-0.20.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f13ccb1c335a124e80c4562573b9b90f01ea9521a1a87f7576c2e281d547f56", size = 225973, upload-time = "2025-11-24T03:56:09.18Z" },
{ url = "https://files.pythonhosted.org/packages/d9/da/032abac1de4d0678d99eaeadb1323bd9d247f4711c012404ba77ed6f15ca/msgspec-0.20.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17c2b5ca19f19306fc83c96d85e606d2cc107e0caeea85066b5389f664e04846", size = 229509, upload-time = "2025-11-24T03:56:10.898Z" },
{ url = "https://files.pythonhosted.org/packages/69/52/fdc7bdb7057a166f309e0b44929e584319e625aaba4771b60912a9321ccd/msgspec-0.20.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d931709355edabf66c2dd1a756b2d658593e79882bc81aae5964969d5a291b63", size = 230434, upload-time = "2025-11-24T03:56:12.48Z" },
{ url = "https://files.pythonhosted.org/packages/cb/fe/1dfd5f512b26b53043884e4f34710c73e294e7cc54278c3fe28380e42c37/msgspec-0.20.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:565f915d2e540e8a0c93a01ff67f50aebe1f7e22798c6a25873f9fda8d1325f8", size = 231758, upload-time = "2025-11-24T03:56:13.765Z" },
{ url = "https://files.pythonhosted.org/packages/97/f6/9ba7121b8e0c4e0beee49575d1dbc804e2e72467692f0428cf39ceba1ea5/msgspec-0.20.0-cp314-cp314t-win_amd64.whl", hash = "sha256:726f3e6c3c323f283f6021ebb6c8ccf58d7cd7baa67b93d73bfbe9a15c34ab8d", size = 206540, upload-time = "2025-11-24T03:56:15.029Z" },
{ url = "https://files.pythonhosted.org/packages/c8/3e/c5187de84bb2c2ca334ab163fcacf19a23ebb1d876c837f81a1b324a15bf/msgspec-0.20.0-cp314-cp314t-win_arm64.whl", hash = "sha256:93f23528edc51d9f686808a361728e903d6f2be55c901d6f5c92e44c6d546bfc", size = 183011, upload-time = "2025-11-24T03:56:16.442Z" },
]
[[package]]