import os
import json
import msal
import requests
import urllib.parse
# Set the required scopes for Microsoft Graph API.
SCOPES = ['Files.ReadWrite', 'User.Read']
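# MSAL automatically appends the reserved scopes (openid, profile, offline_access),
# so only the Microsoft Graph resource scopes need to be listed here.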
AUTHORITY = 'https://login.microsoftonline.com/common'
# You will need to register an application in the Azure Portal
# and obtain these values.
CLIENT_ID = 'YOUR_CLIENT_ID'
CLIENT_SECRET = 'YOUR_CLIENT_SECRET'  # Not used by the device code flow; kept as a placeholder.
# Neither REDIRECT_URI nor a client secret is needed for the device code flow.
# We'll use this folder to store the synced artifacts.
TARGET_FOLDER_PATH = '/AikoAGI/Telemetry'
TOKEN_CACHE_FILENAME = 'ms_token_cache.json'
class OneDriveUploader:
"""
A class to handle the Microsoft OneDrive OAuth2 authentication and file uploads.
This encapsulates the logic for token caching, authentication, and API interaction.
"""
    def __init__(self, client_id, client_secret=None, authority=AUTHORITY, cache_file=TOKEN_CACHE_FILENAME):
        """
        Initializes the OneDriveUploader with MSAL application details.

        client_secret is accepted for compatibility but is not used: the device
        code flow requires a public client, which authenticates without a secret.
        """
        self.cache = self._load_token_cache(cache_file)
        # MSAL only exposes initiate_device_flow()/acquire_token_by_device_flow()
        # on PublicClientApplication, so a confidential client cannot be used here.
        self.app = msal.PublicClientApplication(
            client_id,
            authority=authority,
            token_cache=self.cache
        )
        self.cache_file = cache_file
def _load_token_cache(self, filename):
"""Loads the MSAL token cache from a file."""
cache = msal.SerializableTokenCache()
if os.path.exists(filename):
try:
with open(filename, "r") as f:
cache.deserialize(f.read())
except (IOError, json.JSONDecodeError) as e:
print(f"Error loading token cache: {e}")
return cache
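    # Note: the token cache below is persisted as plaintext JSON, which is fine
    # for local experiments; for anything shared or long-lived an encrypted cache
    # (for example via the optional msal-extensions package) is preferable.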
def _save_token_cache(self):
"""Saves the MSAL token cache to a file if it has changed."""
if self.cache.has_state_changed:
with open(self.cache_file, "w") as f:
f.write(self.cache.serialize())
def authenticate(self):
"""
Handles the Microsoft OAuth2 authentication flow using device code flow.
"""
accounts = self.app.get_accounts()
result = None
# Attempt to acquire token silently
if accounts:
result = self.app.acquire_token_silent(SCOPES, account=accounts[0])
# If silent acquisition fails, use device code flow for CLI elegance
if not result:
flow = self.app.initiate_device_flow(scopes=SCOPES)
            if "user_code" not in flow:
                raise RuntimeError(f"Failed to initiate device flow: {flow.get('error_description', flow)}")
print(flow["message"])
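            # acquire_token_by_device_flow() blocks, polling the token endpoint
            # until the user finishes signing in at the printed URL or the flow
            # expires.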
result = self.app.acquire_token_by_device_flow(flow)
# Check for authentication errors
if "access_token" not in result:
print("Authentication failed:", result.get("error_description"))
return None
self._save_token_cache()
return result["access_token"]
def upload_file(self, filename: str, content: bytes, mime_type: str, target_path: str = TARGET_FOLDER_PATH):
"""
Uploads a file to a specific folder in Microsoft OneDrive.
Args:
filename (str): The name of the file to be uploaded.
content (bytes): The binary content of the file.
mime_type (str): The MIME type of the file.
target_path (str): The OneDrive folder path to upload to.
"""
try:
access_token = self.authenticate()
if not access_token:
print("Could not get access token. Aborting upload.")
return
headers = {
'Authorization': f'Bearer {access_token}',
'Content-Type': mime_type
}
# URL-encode the path and filename to handle special characters
encoded_path = urllib.parse.quote(f"{target_path}/{filename}")
api_endpoint = (
f"https://graph.microsoft.com/v1.0/me/drive/root:{encoded_path}:/content"
)
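            # Note: a single PUT to the :/content endpoint is Graph's "simple
            # upload" and is only intended for small files; larger payloads
            # should use an upload session (createUploadSession), which this
            # module does not implement.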
response = requests.put(api_endpoint, headers=headers, data=content)
response.raise_for_status()
print(f"File '{filename}' uploaded successfully to OneDrive!")
return response.json()
except requests.exceptions.RequestException as e:
print(f"An error occurred while uploading to OneDrive: {e}")
            # requests.Response is falsy for 4xx/5xx status codes, so check for
            # None explicitly rather than relying on truthiness.
            if e.response is not None:
print(f"Response content: {e.response.text}")
            return None


# ---------------------------------------------------------------------------
# AikoTRIM Rituals CLI (separate script; imports the module above, saved as
# onedrive_oauth2_sync_module.py)
# ---------------------------------------------------------------------------
import argparse
import json
import os

from onedrive_oauth2_sync_module import OneDriveUploader, TARGET_FOLDER_PATH
# Dummy data for demonstration purposes
def generate_dummy_ritual_data(activity_id: str):
"""
Generates dummy JSON log content and a dummy PNG file for a TRIM ritual.
In a real application, this would come from the main AikoTRIM backend.
"""
# Dummy JSON log
dummy_log_data = {
"drive": "E",
"allocations_trimmed": 0,
"space_trimmed_gb": 0.0,
"timestamp": "2025-08-10T01:12:00+06:00",
"activity_id": activity_id,
"entropy_score": 0.5,
"poetic_overlay": "The void whispers of potential."
}
log_json = json.dumps(dummy_log_data, indent=2).encode('utf-8')
# Dummy PNG content (a small placeholder image)
png_content = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\nIDATx\x9cc`\x00\x00\x00\x02\x00\x01H\xaf\x8f\x77\x00\x00\x00\x00IEND\xaeB`\x82'
return log_json, png_content
def main():
"""
The main entry point for the AikoTRIM Rituals CLI.
"""
parser = argparse.ArgumentParser(
description="AikoTRIM Ritual CLI: A tool for managing and syncing ritual telemetry.",
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
"command",
choices=["sync"],
help="Action to perform. Currently supports 'sync'."
)
parser.add_argument(
"--activity-id",
required=True,
help="The unique Activity ID for the ritual, e.g., ab66963f-095c-0002-ebcf-6cab5c09dc01"
)
args = parser.parse_args()
    # Azure app registration details. The device code flow only needs the
    # application (client) ID; a client secret is optional and unused here.
    CLIENT_ID = os.getenv("AZURE_CLIENT_ID", 'YOUR_CLIENT_ID')
    CLIENT_SECRET = os.getenv("AZURE_CLIENT_SECRET")
    if CLIENT_ID == 'YOUR_CLIENT_ID':
        print("Please set your AZURE_CLIENT_ID environment variable or update the script.")
        return
uploader = OneDriveUploader(
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET
)
if args.command == "sync":
print(f"🌸 Invoking sync ritual for Activity ID: {args.activity_id}")
# In a real scenario, this is where you'd get the actual
# log and ritual seal from the main backend.
log_json, png_content = generate_dummy_ritual_data(args.activity_id)
# Define filenames using the activity ID as the anchor
json_filename = f"ritual_log_{args.activity_id}.json"
png_filename = f"ritual_seal_{args.activity_id}.png"
# Perform the upload ritual
        print(" - Uploading JSON log to OneDrive...")
        uploader.upload_file(json_filename, log_json, 'application/json', target_path=TARGET_FOLDER_PATH)
        print(" - Uploading PNG seal to OneDrive...")
        uploader.upload_file(png_filename, png_content, 'image/png', target_path=TARGET_FOLDER_PATH)
print(f"✅ Sync ritual complete for Activity ID: {args.activity_id}")
if __name__ == "__main__":
main()
# Example usage:
#
# if __name__ == '__main__':
# # Instantiate the uploader with your credentials
# uploader = OneDriveUploader(
# client_id='YOUR_CLIENT_ID',
# client_secret='YOUR_CLIENT_SECRET'
# )
#
# # Example of how you would use the upload function with dummy data.
# dummy_json_content = json.dumps({"test": "data", "id": "12345"}, indent=2)
# dummy_png_content = b'some_image_bytes' # In a real scenario, this would be a real image buffer
#
# # Uncomment the lines below to test the upload.
# # uploader.upload_file("dummy_log.json", dummy_json_content.encode('utf-8'), 'application/json')
# # uploader.upload_file("dummy_image.png", dummy_png_content, 'image/png')
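# Running the CLI (the script filename below is an assumption; use whatever name
# this file is saved as, with AZURE_CLIENT_ID exported first):
#
#   python aikotrim_cli.py sync --activity-id ab66963f-095c-0002-ebcf-6cab5c09dc01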