function move_documents_to_trash
Moves specified reMarkable Cloud documents to trash by updating their metadata parent field to 'trash' and propagating changes through the document hierarchy.
/tf/active/vicechatdev/e-ink-llm/cloudtest/move_remaining_to_trash_fixed.py
30 - 220
complex
Purpose
This function is designed to programmatically move documents from the reMarkable Cloud storage to the trash folder. It handles the complete workflow of fetching document schemas, updating metadata to set parent to 'trash', uploading modified components, and updating the root document schema and roothash to reflect the changes. The function is specifically tailored for the reMarkable Cloud API v3 sync protocol and includes hardcoded document UUIDs for specific test documents.
Source Code
def move_documents_to_trash():
    """Move a hardcoded set of reMarkable Cloud documents into the trash.

    For each document the function:
      1. downloads its current docSchema (the index of its component files),
      2. rewrites the ``<uuid>.metadata`` component so ``parent == 'trash'``,
      3. uploads the new metadata and a rebuilt docSchema,
      4. patches ``root.docSchema`` to reference the new document hash, and
      5. commits the change by updating the roothash.

    Side effects only (sync v3 API calls + console progress output);
    returns None. Requires ``RemarkableAuth`` (auth module) and a
    ``calculate_crc32c`` helper to be defined in the surrounding module.
    Assumes the EU region endpoint — TODO confirm for non-EU accounts.
    """
    print("🗑️ Moving Remaining Documents to Trash")
    print("=" * 50)

    # Authenticate and obtain a bearer token + session.
    auth = RemarkableAuth()
    user_token = auth.authenticate()
    session = auth.get_authenticated_session()
    if not session or not user_token:
        print("❌ Authentication failed")
        return

    # Documents to move to trash (hashes captured from a prior verification run).
    documents_to_move = [
        {
            'name': 'UploadTest_1753942242',
            'uuid': '206f5df3-07c2-4341-8afd-2b7362aefa91',
            'current_hash': '83b9d2cd210b5f7d959ff74240dd67421074aad7811369ba73ac4d0c97e3e102',
            'location': 'gpt_in folder'
        },
        {
            'name': 'FixedUpload_1753948957',
            'uuid': '39056ec1-5303-4a6b-8f04-695bdcfa8869',
            'current_hash': 'e26fe60d7a0fa768164530342f5a79612a5df14d65e7f8f891f88d83c154ccc8',
            'location': 'root'
        }
    ]

    for doc in documents_to_move:
        print(f"\n📄 Moving: {doc['name']}")
        print(f"   UUID: {doc['uuid']}")
        print(f"   Location: {doc['location']}")
        try:
            # Step 1: Fetch the document's current schema.
            doc_response = session.get(
                f"https://eu.tectonic.remarkable.com/sync/v3/files/{doc['current_hash']}"
            )
            doc_response.raise_for_status()
            doc_content = doc_response.text
            print(f"   📋 Current DocSchema: {len(doc_content)} bytes")

            # Parse component entries; presumably "hash:flag:name:subfiles:size"
            # per the sync v3 schema format — verify against protocol notes.
            lines = doc_content.strip().split('\n')
            components = {}
            for line in lines[1:]:  # skip the leading version line
                if ':' in line:
                    parts = line.split(':')
                    if len(parts) >= 5:
                        comp_hash = parts[0]
                        comp_name = parts[2]
                        comp_size = parts[4]
                        components[comp_name] = {
                            'hash': comp_hash,
                            'size': comp_size
                        }

            # Step 2: Fetch the metadata component and re-parent it to trash.
            metadata_name = f"{doc['uuid']}.metadata"
            if metadata_name in components:
                metadata_hash = components[metadata_name]['hash']
                metadata_response = session.get(
                    f"https://eu.tectonic.remarkable.com/sync/v3/files/{metadata_hash}"
                )
                metadata_response.raise_for_status()
                metadata = json.loads(metadata_response.text)
                print(f"   📁 Current parent: '{metadata.get('parent', '')}'")

                # Idempotence guard: skip documents already trashed.
                if metadata.get('parent') == 'trash':
                    print(f"   ⚠️ Document already in trash, skipping...")
                    continue

                metadata['parent'] = 'trash'

                # Upload the updated metadata blob; compact separators keep the
                # byte length (used below in the schema entry) deterministic.
                updated_metadata_json = json.dumps(metadata, separators=(',', ':'))
                metadata_crc = calculate_crc32c(updated_metadata_json)
                headers = {
                    'Authorization': f'Bearer {user_token}',
                    'User-Agent': 'reMarkable-desktop-win/3.11.1.1951',
                    'rm-filename': metadata_name,
                    'rm-crc32c': metadata_crc,
                    'Content-Type': 'application/octet-stream'
                }
                upload_response = session.put(
                    'https://eu.tectonic.remarkable.com/sync/v3/files',
                    headers=headers,
                    data=updated_metadata_json
                )
                upload_response.raise_for_status()
                # Server responds with the content hash of the stored blob.
                new_metadata_hash = upload_response.text.strip()
                print(f"   ✅ Updated metadata uploaded: {new_metadata_hash[:16]}...")

                # Step 3: Rebuild the document schema, swapping in the new
                # metadata hash and size; all other component lines pass through.
                new_doc_lines = [lines[0]]  # keep the version line
                for line in lines[1:]:
                    if metadata_name in line:
                        parts = line.split(':')
                        parts[0] = new_metadata_hash
                        parts[4] = str(len(updated_metadata_json))
                        new_doc_lines.append(':'.join(parts))
                    else:
                        new_doc_lines.append(line)
                new_doc_content = '\n'.join(new_doc_lines)

                # Upload the rebuilt document schema.
                doc_crc = calculate_crc32c(new_doc_content)
                doc_headers = {
                    'Authorization': f'Bearer {user_token}',
                    'User-Agent': 'reMarkable-desktop-win/3.11.1.1951',
                    'rm-filename': f'{doc["uuid"]}.docSchema',
                    'rm-crc32c': doc_crc,
                    'Content-Type': 'application/octet-stream'
                }
                doc_upload_response = session.put(
                    'https://eu.tectonic.remarkable.com/sync/v3/files',
                    headers=doc_headers,
                    data=new_doc_content
                )
                doc_upload_response.raise_for_status()
                new_doc_hash = doc_upload_response.text.strip()
                print(f"   ✅ New document schema uploaded: {new_doc_hash[:16]}...")

                # Step 4: Update root.docSchema so the tree references the new
                # document hash (plain text substitution of old hash for new).
                print(f"   🔄 Updating root.docSchema...")
                root_response = session.get(
                    'https://eu.tectonic.remarkable.com/sync/v3/files/root.docSchema'
                )
                root_response.raise_for_status()
                root_content = root_response.text
                updated_root_content = root_content.replace(doc['current_hash'], new_doc_hash)

                root_crc = calculate_crc32c(updated_root_content)
                root_headers = {
                    'Authorization': f'Bearer {user_token}',
                    'User-Agent': 'reMarkable-desktop-win/3.11.1.1951',
                    'rm-filename': 'root.docSchema',
                    'rm-crc32c': root_crc,
                    'Content-Type': 'application/octet-stream'
                }
                root_upload_response = session.put(
                    'https://eu.tectonic.remarkable.com/sync/v3/files',
                    headers=root_headers,
                    data=updated_root_content
                )
                root_upload_response.raise_for_status()
                new_root_hash = root_upload_response.text.strip()
                print(f"   ✅ Root updated: {new_root_hash[:16]}...")

                # Step 5: Commit by pointing the roothash at the new root.
                roothash_response = session.put(
                    'https://eu.tectonic.remarkable.com/sync/v3/roothash',
                    headers={
                        'Authorization': f'Bearer {user_token}',
                        'User-Agent': 'reMarkable-desktop-win/3.11.1.1951',
                        'Content-Type': 'text/plain'
                    },
                    data=new_root_hash
                )
                roothash_response.raise_for_status()
                print(f"   ✅ Roothash updated successfully")
                print(f"   🗑️ Document '{doc['name']}' moved to trash!")
            else:
                print(f"   ❌ Metadata component not found")
        except Exception as e:
            # Best-effort per document: log and continue with the next one.
            print(f"   ❌ Error moving document: {e}")
        print("-" * 40)

    print(f"\n🎉 Trash move operation completed!")
    print(f"💡 You can verify the results by checking the cloud content again.")
Return Value
This function does not return any value (implicitly returns None). It performs side effects by making API calls to move documents to trash and prints status messages to console.
Dependencies
requests, json, hashlib, base64, binascii, time
Required Imports
from auth import RemarkableAuth
import json
import hashlib
import base64
import binascii
import time
Usage Example
# Ensure auth.py module exists with RemarkableAuth class
# Ensure calculate_crc32c function is defined
# Run the function to move hardcoded documents to trash
move_documents_to_trash()
# Output will show progress of moving each document:
# 🗑️ Moving Remaining Documents to Trash
# ==================================================
#
# 📄 Moving: UploadTest_1753942242
#    UUID: 206f5df3-07c2-4341-8afd-2b7362aefa91
#    Location: gpt_in folder
#    📋 Current DocSchema: 1234 bytes
#    📁 Current parent: 'some-parent-uuid'
#    ✅ Updated metadata uploaded: e26fe60d7a0fa768...
#    ✅ New document schema uploaded: 83b9d2cd210b5f7d...
#    🔄 Updating root.docSchema...
#    ✅ Root updated: a1b2c3d4e5f6g7h8...
#    ✅ Roothash updated successfully
#    🗑️ Document 'UploadTest_1753942242' moved to trash!
Best Practices
- This function contains hardcoded document UUIDs and hashes - it should be refactored to accept documents as parameters for reusability
- The function makes multiple sequential API calls without retry logic - consider adding error handling and retry mechanisms for production use
- Authentication is performed within the function - consider passing authenticated session as parameter for better separation of concerns
- The function modifies the root.docSchema and roothash which affects the entire document tree - ensure proper backup before running
- The calculate_crc32c function must be defined elsewhere in the codebase - ensure it's available before calling this function
- The function assumes EU region endpoint (eu.tectonic.remarkable.com) - may need region detection for global use
- Consider adding a dry-run mode to preview changes before actually moving documents
- The function skips documents already in trash - this is good defensive programming
- Error handling is present but could be more granular to handle specific API error codes
- The function prints progress to console - consider using logging module for production environments
Tags
Similar Components
AI-powered semantic similarity - components with related functionality:
- function move_document_to_trash — 94.6% similar
- class DocumentToTrashMover — 89.8% similar
- function apply_working_trash_move — 86.4% similar
- function simple_move_to_trash — 85.1% similar
- function main_v45 — 75.8% similar