added cargo files

This commit is contained in:
2026-03-03 10:57:43 -05:00
parent 478a90e01b
commit 169df46bc2
813 changed files with 227273 additions and 9 deletions

View File

@@ -0,0 +1,4 @@
# Generated by Cargo
# will have compiled files and executables
/target/
keystore.properties

4918
PinePods-0.8.2/web/src-tauri/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,37 @@
[package]
name = "app"
version = "0.1.0"
description = "Pinepods-tauri"
authors = ["Gooseberry Development"]
license = ""
repository = ""
default-run = "app"
edition = "2021"
rust-version = "1.89"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[build-dependencies]
tauri-build = { version = "2.5.1", features = [] }
[dependencies]
serde_json = "1.0.145"
serde = { version = "1.0.228", features = ["derive"] }
tauri = { version = "2.9.2", features = ["tray-icon"] }
directories = "6.0.0"
dirs = "6.0.0"
# reqwest = { version = "0.12.5", features = ["blocking", "json"] }
tokio = { version = "1.48.0", features = ["full"] }
warp = { version = "0.4.2", features = ["server"] }
ureq = "=3.1.2"
[features]
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
# If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
# DO NOT REMOVE!!
custom-protocol = ["tauri/custom-protocol"]
[lib]
name = "app_lib"
crate-type = ["staticlib", "cdylib", "rlib"]

View File

@@ -0,0 +1,3 @@
// Tauri build script: runs the tauri-build codegen (bundles resources,
// embeds the config) before the main crate compiles.
fn main() {
    tauri_build::build()
}

View File

@@ -0,0 +1,22 @@
import json
import sys
def update_version(file_path, new_version):
    """Rewrite the top-level "version" key of a JSON config file in place.

    The file is parsed, the root-level "version" value is replaced with
    ``new_version``, and the document is written back with 2-space indent.
    """
    with open(file_path, 'r') as fh:
        data = json.load(fh)
    # Only the root-level version is touched; all other keys pass through.
    data['version'] = new_version
    with open(file_path, 'w') as fh:
        json.dump(data, fh, indent=2)
if __name__ == "__main__":
    # CLI entry point: update_version.py <file_path> <new_version>
    if len(sys.argv) != 3:
        print("Usage: update_version.py <file_path> <new_version>")
        sys.exit(1)
    file_path = sys.argv[1]
    new_version = sys.argv[2]
    update_version(file_path, new_version)

View File

@@ -0,0 +1,51 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
<id>com.gooseberrydevelopment.pinepods</id>
<metadata_license>CC0-1.0</metadata_license>
<project_license>GPL-3.0-only</project_license>
<name>Pinepods</name>
<summary>A complete, self-hosted podcast management system</summary>
<developer id="com.gooseberrydevelopment">
<name>Gooseberry Development</name>
</developer>
<description>
<p>
PinePods is a Rust based podcast management system that manages podcasts with multi-user support and relies on a central database with clients to connect to it. It's browser based and your podcasts and settings follow you from device to device due to everything being stored on the server. You can subscribe to podcasts and even hosts for podcasts with the help of the PodPeopleDB. It works on mobile devices and can also sync with a Nextcloud server or gpodder compatible sync server so you can use external apps like Antennapod as well! NOTE: This is the client edition of Pinepods. You must have a server to connect to in order to use this app.
</p>
<p>
Instructions for setting up your own self-hosted server can be found on the main repo
</p>
</description>
<launchable type="desktop-id">com.gooseberrydevelopment.pinepods.desktop</launchable>
<screenshots>
<screenshot type="default">
<image>https://raw.githubusercontent.com/madeofpendletonwool/PinePods/3c84f982bc3d320e2d4ebe6ce2788f5fd656f7b5/images/screenshots/homethemed.png</image>
<caption>Main interface of Pinepods</caption>
</screenshot>
</screenshots>
<url type="homepage">https://github.com/madeofpendletonwool/PinePods</url>
<url type="bugtracker">https://github.com/madeofpendletonwool/PinePods/issues</url>
<url type="donation">https://github.com/sponsors/madeofpendletonwool</url>
<releases>
<release version="0.7.6" date="2025-03-23">
<description>
<p>I'm excited to announce PinePods 0.7.6, fully showing the power of the new notification system, things like episode downloads will now display percentages as they download in a brand new alert component that's been added. A worker system for background tasks that can be easily monitored on a per user basis so we can now get status of tasks as they execute was added back in version 0.7.4. Now we're starting to see what can be done with it. At this point I am confident Pinepods is the ultimate podcast archival tool. In the near future I will continue to migrate tasks over to this new system which will allow for even greater visibility into exactly what the server is doing. Think monitoring status of Nextcloud sync tasks etc.
In addition, this update addresses several issues discovered after the previous 0.7.4 release, as that was a big one!</p>
</description>
</release>
</releases>
<requires>
<display_length compare="ge">360</display_length>
</requires>
<recommends>
<display_length compare="ge">548</display_length>
</recommends>
<content_rating type="oars-1.1" />
</component>

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
{}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 355 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 112 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.5 KiB

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 55 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 102 KiB

View File

@@ -0,0 +1,563 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
use directories::ProjectDirs;
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::{HashMap, HashSet};
use std::fs;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::copy;
use std::io::Write;
use std::path::PathBuf;
use tauri::command;
/// Deserializes a `categories` field that the server may send either as a
/// comma-separated string (e.g. `"News,Tech"`) or as a JSON object.
///
/// String input is split on commas into a map keyed by each category's
/// index in the list; the empty string and the literal `"{}"` both yield
/// an empty map. Map input is copied through unchanged.
fn deserialize_categories<'de, D>(deserializer: D) -> Result<HashMap<String, String>, D::Error>
where
    D: Deserializer<'de>,
{
    use serde::de::{self, Visitor};
    use std::fmt;
    struct CategoriesVisitor;
    impl<'de> Visitor<'de> for CategoriesVisitor {
        type Value = HashMap<String, String>;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a string or a map")
        }
        fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
        where
            E: de::Error,
        {
            // Convert a comma-separated string to a HashMap keyed by the
            // position of each (trimmed) category in the list.
            let mut map = HashMap::new();
            if !value.is_empty() && value != "{}" {
                for (i, category) in value.split(',').enumerate() {
                    map.insert(i.to_string(), category.trim().to_string());
                }
            }
            Ok(map)
        }
        fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
        where
            M: de::MapAccess<'de>,
        {
            // Map input: copy every key/value pair through as-is.
            let mut categories = HashMap::new();
            while let Some((key, value)) = map.next_entry()? {
                categories.insert(key, value);
            }
            Ok(categories)
        }
    }
    // deserialize_any lets serde dispatch to visit_str or visit_map based
    // on the actual JSON token encountered.
    deserializer.deserialize_any(CategoriesVisitor)
}
// A single directory entry, exposed to the frontend as its full path string.
#[derive(Serialize, Deserialize)]
struct FileEntry {
    path: String,
}
// Function to list directory contents
#[command]
async fn list_dir(path: String) -> Result<Vec<FileEntry>, String> {
let home_dir = dirs::home_dir().ok_or("Cannot find home directory")?;
let target_path = if path == "~" {
home_dir
} else {
PathBuf::from(path)
};
let mut entries = Vec::new();
for entry in fs::read_dir(target_path).map_err(|e| e.to_string())? {
let entry = entry.map_err(|e| e.to_string())?;
entries.push(FileEntry {
path: entry.path().display().to_string(),
});
}
Ok(entries)
}
/// Resolves the platform-specific project directories for
/// com.gooseberrydevelopment.pinepods, or a string error if the OS
/// provides no suitable location.
fn get_project_dirs() -> Result<ProjectDirs, String> {
    match ProjectDirs::from("com", "gooseberrydevelopment", "pinepods") {
        Some(dirs) => Ok(dirs),
        None => Err(String::from("Cannot determine project directories")),
    }
}
/// Returns the app data directory as a display string, creating the
/// directory on first use if it does not yet exist.
#[command]
fn get_app_dir() -> Result<String, String> {
    let app_dir = get_project_dirs()?.data_dir().to_path_buf();
    if !app_dir.exists() {
        fs::create_dir_all(&app_dir).map_err(|e| e.to_string())?;
    }
    Ok(app_dir.display().to_string())
}
/// Downloads `url` into the app data directory as `filename`.
///
/// The HTTP fetch runs on a blocking thread via `spawn_blocking` because
/// `ureq` is a synchronous client. HTTP, I/O, and task-join errors are all
/// stringified for the frontend.
#[command]
async fn download_file(url: String, filename: String) -> Result<(), String> {
    println!(
        "Starting download_file with url: {}, filename: {}",
        url, filename
    );
    let proj_dirs = get_project_dirs()?;
    let app_dir: PathBuf = proj_dirs.data_dir().to_path_buf();
    println!("App dir path: {:?}", app_dir);
    if !app_dir.exists() {
        println!("Creating app directory");
        fs::create_dir_all(&app_dir).map_err(|e| e.to_string())?;
    }
    // NOTE(review): `filename` is joined into the data dir unchecked; a
    // value containing `..` could escape it — confirm callers only pass
    // plain basenames.
    // `url` and `filename` are already owned, so they move into the
    // closure directly (the previous `.clone()` calls were redundant).
    tokio::task::spawn_blocking(move || {
        // Follow up to 20 redirects, matching the previous behavior.
        let agent = ureq::Agent::config_builder()
            .max_redirects(20)
            .build()
            .new_agent();
        let mut response = agent.get(&url).call().map_err(|e| e.to_string())?;
        let mut reader = response.body_mut().with_config().reader();
        let mut file = File::create(app_dir.join(&filename)).map_err(|e| e.to_string())?;
        copy(&mut reader, &mut file).map_err(|e| e.to_string())?;
        Ok(())
    })
    .await
    .map_err(|e| e.to_string())?
}
/// Episode record as stored in the local `local_episodes.json` database.
///
/// Field names mirror the server API response (lowercase, unseparated);
/// `#[serde(default)]` lets older records that are missing newer fields
/// still deserialize.
#[derive(Debug, Deserialize, Default, Clone, PartialEq, Serialize)]
#[serde(default)]
pub struct EpisodeInfo {
    pub episodetitle: String,
    pub podcastname: String,
    pub podcastid: i32,
    pub podcastindexid: Option<i64>,
    pub feedurl: String, // This field exists in the response
    pub episodepubdate: String,
    pub episodedescription: String,
    pub episodeartwork: String,
    pub episodeurl: String,
    pub episodeduration: i32,
    pub listenduration: Option<i32>,
    pub episodeid: i32,
    pub completed: bool,
    pub is_queued: bool,
    pub is_saved: bool,
    pub is_downloaded: bool,
    // Absolute path of the cached audio file; set by `update_local_db`.
    pub downloadedlocation: Option<String>,
    pub is_youtube: bool,
}
/// Episode shape returned to the frontend by `get_local_episodes`.
///
/// Carries the same data as `EpisodeInfo` but with unprefixed status
/// flags (`queued`/`saved`/`downloaded` instead of `is_*`).
#[derive(Debug, Deserialize, Default, Clone, PartialEq, Serialize)]
#[serde(default)]
pub struct EpisodeDownload {
    pub episodetitle: String,
    pub podcastname: String,
    pub episodepubdate: String,
    pub episodedescription: String,
    pub episodeartwork: String,
    pub episodeurl: String,
    pub episodeduration: i32,
    pub listenduration: Option<i32>,
    pub episodeid: i32,
    pub downloadedlocation: Option<String>,
    pub podcastid: i32,
    pub podcastindexid: Option<i64>,
    pub completed: bool,
    pub queued: bool,
    pub saved: bool,
    pub downloaded: bool,
    pub is_youtube: bool,
}
#[command]
async fn update_local_db(mut episode_info: EpisodeInfo) -> Result<(), String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_episodes.json");
// Calculate the downloaded location
let download_dir = proj_dirs
.data_dir()
.join(format!("episode_{}.mp3", episode_info.episodeid));
episode_info.downloadedlocation = Some(download_dir.to_string_lossy().into_owned());
let mut episodes = if db_path.exists() {
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
serde_json::from_str::<Vec<EpisodeInfo>>(&data).map_err(|e| e.to_string())?
} else {
Vec::new()
};
// Check if episode already exists before adding
if !episodes.iter().any(|ep| ep.episodeid == episode_info.episodeid) {
episodes.push(episode_info);
}
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&db_path)
.map_err(|e| e.to_string())?;
serde_json::to_writer(file, &episodes).map_err(|e| e.to_string())?;
Ok(())
}
#[command]
async fn remove_multiple_from_local_db(episode_ids: Vec<i32>) -> Result<(), String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_episodes.json");
let mut episodes = if db_path.exists() {
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
serde_json::from_str::<Vec<EpisodeInfo>>(&data).map_err(|e| e.to_string())?
} else {
return Ok(()); // No episodes to remove if file doesn't exist
};
// Remove episodes with matching IDs
episodes.retain(|episode| !episode_ids.contains(&episode.episodeid));
// Write updated episodes back to file
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&db_path)
.map_err(|e| e.to_string())?;
serde_json::to_writer(file, &episodes).map_err(|e| e.to_string())?;
// Delete the audio files and artwork for each episode
for episodeid in episode_ids {
let audio_file_path = proj_dirs
.data_dir()
.join(format!("episode_{}.mp3", episodeid));
let artwork_file_path = proj_dirs
.data_dir()
.join(format!("artwork_{}.jpg", episodeid));
if audio_file_path.exists() {
std::fs::remove_file(audio_file_path).map_err(|e| e.to_string())?;
}
if artwork_file_path.exists() {
std::fs::remove_file(artwork_file_path).map_err(|e| e.to_string())?;
}
}
Ok(())
}
#[command]
async fn remove_from_local_db(episodeid: i32) -> Result<(), String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_episodes.json");
let mut episodes = if db_path.exists() {
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
serde_json::from_str::<Vec<EpisodeInfo>>(&data).map_err(|e| e.to_string())?
} else {
return Ok(()); // No episodes to remove if file doesn't exist
};
episodes.retain(|episode| episode.episodeid != episodeid);
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&db_path)
.map_err(|e| e.to_string())?;
serde_json::to_writer(file, &episodes).map_err(|e| e.to_string())?;
// Delete the audio file and artwork
let audio_file_path = proj_dirs
.data_dir()
.join(format!("episode_{}.mp3", episodeid));
let artwork_file_path = proj_dirs
.data_dir()
.join(format!("artwork_{}.jpg", episodeid));
if audio_file_path.exists() {
std::fs::remove_file(audio_file_path).map_err(|e| e.to_string())?;
}
if artwork_file_path.exists() {
std::fs::remove_file(artwork_file_path).map_err(|e| e.to_string())?;
}
Ok(())
}
#[command]
async fn deduplicate_local_episodes() -> Result<(), String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_episodes.json");
if !db_path.exists() {
return Ok(());
}
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
let episodes = match serde_json::from_str::<Vec<EpisodeInfo>>(&data) {
Ok(eps) => eps,
Err(e) => {
println!("JSON parsing error: {}, resetting file", e);
std::fs::write(&db_path, "[]").map_err(|e| e.to_string())?;
return Ok(());
}
};
// Remove duplicates based on episodeid
let mut unique_episodes = Vec::new();
let mut seen_ids = HashSet::new();
for episode in episodes {
if seen_ids.insert(episode.episodeid) {
unique_episodes.push(episode);
}
}
// Write back the deduplicated episodes
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&db_path)
.map_err(|e| e.to_string())?;
serde_json::to_writer(file, &unique_episodes).map_err(|e| e.to_string())?;
Ok(())
}
#[command]
async fn get_local_episodes() -> Result<Vec<EpisodeDownload>, String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_episodes.json");
if !db_path.exists() {
return Ok(Vec::new());
}
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
println!("Raw JSON data: {}", data);
// If JSON is corrupted, reset it and return empty
let episodes = match serde_json::from_str::<Vec<EpisodeInfo>>(&data) {
Ok(eps) => eps,
Err(e) => {
println!("JSON parsing error: {}, resetting file", e);
// Reset the file to empty array
std::fs::write(&db_path, "[]").map_err(|e| e.to_string())?;
return Ok(Vec::new());
}
};
// Convert EpisodeInfo to EpisodeDownload
let converted_episodes: Vec<EpisodeDownload> = episodes
.into_iter()
.map(|ep| EpisodeDownload {
episodetitle: ep.episodetitle,
podcastname: ep.podcastname,
episodepubdate: ep.episodepubdate,
episodedescription: ep.episodedescription,
episodeartwork: ep.episodeartwork,
episodeurl: ep.episodeurl,
episodeduration: ep.episodeduration,
listenduration: ep.listenduration,
episodeid: ep.episodeid,
downloadedlocation: ep.downloadedlocation,
podcastid: ep.podcastid,
podcastindexid: ep.podcastindexid,
completed: ep.completed,
queued: ep.is_queued,
saved: ep.is_saved,
downloaded: ep.is_downloaded,
is_youtube: ep.is_youtube,
})
.collect();
Ok(converted_episodes)
}
/// Deletes `filename` from the app data directory, erroring if the file
/// does not exist.
#[command]
fn delete_file(filename: String) -> Result<(), String> {
    let proj_dirs = get_project_dirs()?;
    let target = proj_dirs.data_dir().join(filename);
    // Guard clause: a missing file is reported, not silently ignored.
    if !target.exists() {
        return Err("File does not exist".to_string());
    }
    fs::remove_file(target).map_err(|e| e.to_string())
}
/// Lists every entry in the app data directory as a full path string.
#[command]
fn list_app_files() -> Result<Vec<FileEntry>, String> {
    let proj_dirs = get_project_dirs()?;
    // Collect into Result so the first unreadable entry aborts the listing.
    fs::read_dir(proj_dirs.data_dir())
        .map_err(|e| e.to_string())?
        .map(|res| {
            res.map(|entry| FileEntry {
                path: entry.path().display().to_string(),
            })
            .map_err(|e| e.to_string())
        })
        .collect()
}
/// Podcast record as stored in `local_podcasts.json` by `update_podcast_db`.
///
/// `categories` may arrive either as a comma-separated string or as a
/// JSON map, so it goes through the custom `deserialize_categories` helper.
#[derive(Deserialize, Debug, Clone, Serialize)]
pub struct PodcastDetails {
    pub podcastid: i32,
    pub podcastindexid: Option<i64>,
    pub artworkurl: String,
    pub author: String,
    #[serde(deserialize_with = "deserialize_categories")]
    pub categories: HashMap<String, String>,
    pub description: String,
    pub episodecount: i32,
    pub explicit: bool,
    pub feedurl: String,
    pub podcastname: String,
    pub userid: i32,
    pub websiteurl: String,
}
#[command]
async fn update_podcast_db(podcast_details: PodcastDetails) -> Result<(), String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_podcasts.json");
let mut podcasts = if db_path.exists() {
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
serde_json::from_str::<Vec<PodcastDetails>>(&data).map_err(|e| e.to_string())?
} else {
Vec::new()
};
if !podcasts
.iter()
.any(|p| p.podcastid == podcast_details.podcastid)
{
podcasts.push(podcast_details);
}
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&db_path)
.map_err(|e| e.to_string())?;
serde_json::to_writer(file, &podcasts).map_err(|e| e.to_string())?;
Ok(())
}
/// Podcast shape returned to the frontend by `get_local_podcasts`.
///
/// Unlike `PodcastDetails`, several metadata fields are optional here, so
/// partially populated records still deserialize.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
#[allow(non_snake_case)]
pub struct Podcast {
    pub podcastid: i32,
    pub podcastindexid: Option<i64>,
    pub podcastname: String,
    pub artworkurl: Option<String>,
    pub description: Option<String>,
    pub episodecount: i32,
    pub websiteurl: Option<String>,
    pub feedurl: String,
    pub author: Option<String>,
    #[serde(deserialize_with = "deserialize_categories")]
    pub categories: HashMap<String, String>,
    pub explicit: bool,
    // pub is_youtube: bool,
}
#[command]
async fn get_local_podcasts() -> Result<Vec<Podcast>, String> {
let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?;
let db_path = proj_dirs.data_dir().join("local_podcasts.json");
if !db_path.exists() {
return Ok(Vec::new());
}
let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?;
let podcasts = serde_json::from_str::<Vec<Podcast>>(&data).map_err(|e| e.to_string())?;
Ok(podcasts)
}
/// Reads the file at `filepath` and returns its raw bytes.
///
/// Uses `std::fs::read`, which sizes its buffer from file metadata, in
/// place of the previous manual `File::open` + `read_to_end` sequence
/// (same result, less code, one fewer allocation-growth path).
#[tauri::command]
async fn get_local_file(filepath: String) -> Result<Vec<u8>, String> {
    std::fs::read(&filepath).map_err(|e| e.to_string())
}
/// Serves the directory containing `filepath` over HTTP on
/// 127.0.0.1:3030 and returns the base URL.
///
/// NOTE(review): every call spawns a new server bound to the same fixed
/// port, so a second call's bind will fail inside the detached task —
/// consider tracking whether a server is already running.
#[tauri::command]
async fn start_file_server(filepath: String) -> Result<String, String> {
    println!("Starting file server with path: {}", filepath);
    let path = std::path::Path::new(&filepath);
    // Ensure the path exists and is accessible before serving anything.
    if !path.exists() {
        return Err(format!("File path does not exist: {}", filepath));
    }
    // Determine the parent directory to serve without panicking on paths
    // that have no parent or are not valid UTF-8 (previously `unwrap()`s).
    let file_dir = path
        .parent()
        .ok_or_else(|| format!("File path has no parent directory: {}", filepath))?
        .to_str()
        .ok_or_else(|| format!("File path is not valid UTF-8: {}", filepath))?
        .to_string();
    println!("Serving files from directory: {}", file_dir);
    // Serve the whole directory containing the file via warp.
    let file_route = warp::fs::dir(file_dir);
    // Run the server as a detached background task.
    tokio::spawn(warp::serve(file_route).run(([127, 0, 0, 1], 3030)));
    Ok("http://127.0.0.1:3030".to_string())
}
/// Tauri application entry point: registers every `#[command]` handler
/// exposed to the frontend and starts the event loop. The `mobile_entry_point`
/// attribute makes this the entry point on mobile builds as well.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![
            list_dir,
            get_app_dir,
            download_file,
            delete_file,
            update_local_db,
            remove_from_local_db,
            remove_multiple_from_local_db,
            update_podcast_db,
            get_local_podcasts,
            get_local_episodes,
            deduplicate_local_episodes,
            list_app_files,
            get_local_file,
            start_file_server
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}

View File

@@ -0,0 +1,5 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
fn main() {
    // Delegate to the shared library entry point so desktop and mobile
    // builds run the same `run()` implementation.
    app_lib::run();
}

View File

@@ -0,0 +1,70 @@
{
"build": {
"beforeBuildCommand": "",
"beforeDevCommand": "RUSTFLAGS='--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"' trunk serve",
"devUrl": "http://localhost:8080",
"frontendDist": "../dist"
},
"identifier": "com.gooseberrydevelopment.pinepods",
"productName": "Pinepods",
"version": "1.2.4",
"app": {
"trayIcon": {
"iconPath": "icons/icon.png",
"iconAsTemplate": true
},
"withGlobalTauri": true,
"security": {
"csp": null
},
"windows": [
{
"fullscreen": false,
"height": 1000,
"resizable": true,
"title": "Pinepods",
"width": 1200
}
]
},
"bundle": {
"active": true,
"category": "DeveloperTool",
"copyright": "",
"linux": {
"deb": {
"depends": [],
"files": {
"/usr/share/metainfo/com.gooseberrydevelopment.pinepods.metainfo.xml": "./com.gooseberrydevelopment.pinepods.metainfo.xml"
}
}
},
"android": {
"versionCode": 100
},
"externalBin": [],
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
],
"longDescription": "",
"macOS": {
"entitlements": null,
"exceptionDomain": "",
"frameworks": [],
"providerShortName": null,
"signingIdentity": null
},
"resources": [],
"shortDescription": "",
"targets": "all",
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
}
}
}