Merge branch 'main' into storage-product-page

This commit is contained in:
Jesse Winton
2024-10-28 13:25:35 -04:00
60 changed files with 1018 additions and 31 deletions

View File

@@ -32,6 +32,13 @@ services:
- --providers.docker.exposedByDefault=false
- --entrypoints.web.address=:80
- --entrypoints.websecure.address=:443
- --entrypoints.web.transport.lifeCycle.requestAcceptGraceTimeout=60s
- --entrypoints.web.proxyProtocol.trustedIPs=10.0.0.0/8
- --entrypoints.websecure.transport.lifeCycle.requestAcceptGraceTimeout=60s
- --entrypoints.websecure.proxyProtocol.trustedIPs=10.0.0.0/8
- --entryPoints.websecure.forwardedHeaders.trustedIPs=103.21.244.0/22,103.22.200.0/22,103.31.4.0/22,104.16.0.0/13,104.24.0.0/14,108.162.192.0/18,131.0.72.0/22,141.101.64.0/18,162.158.0.0/15,172.64.0.0/13,173.245.48.0/20,188.114.96.0/20,190.93.240.0/20,197.234.240.0/22,198.41.128.0/17
- --ping
- --ping.entryPoint=web
- --entrypoints.web.http.redirections.entrypoint.to=websecure
- --entrypoints.web.http.redirections.entrypoint.scheme=https
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`,`appwrite`)

View File

@@ -43,7 +43,7 @@
<main class="contents" id="main">
<article class="web-article contents">
<header class="web-article-header">
<div class="web-article-header-start u-flex-vertical web-u-cross-start">
<div class="web-article-header-start web-u-cross-start flex flex-col">
<ul class="web-metadata web-caption-400">
{#if currentStepItem.difficulty}
<li>{currentStepItem.difficulty}</li>
@@ -52,7 +52,7 @@
<li>{currentStepItem.readtime} min</li>
{/if}
</ul>
<div class="u-position-relative u-flex u-cross-center">
<div class="u-cross-center relative flex">
<h1 class="web-title">{firstStepItem?.title}</h1>
</div>
</div>
@@ -72,7 +72,7 @@
<slot />
</div>
<div class="u-flex u-main-space-between">
<div class="flex justify-between">
{#if prevStep}
<a href={prevStep.href} class="web-button is-text previous-step-anchor">
<span class="icon-cheveron-left" aria-hidden="true" />

View File

@@ -11,7 +11,7 @@
const tag = `h${level + 1}`;
const ctx = hasContext('headings') ? getContext<LayoutContext>('headings') : undefined;
const classList: Record<typeof level, string> = {
1: 'text-label mb-4 mt-8',
1: 'text-label mb-4',
2: 'text-description mb-4',
3: 'text-body font-medium mb-4',
4: 'text-sub-body font-medium'

View File

@@ -0,0 +1,366 @@
---
layout: post
title: Building a currency converter API with Deno 2 and Appwrite
description: Learn how to build a currency converter API with Deno 2 and how to use Appwrite Functions to deploy and run your API.
date: 2024-10-18
cover: /images/blog/build-a-currency-converter-with-deno2/cover.png
timeToRead: 12
author: ebenezer-don
category: tutorial
---
When building APIs, one of the most useful things you can create is a currency converter. Whether you're working on an application that handles pricing in different currencies or something more personal like tracking expenses across borders, having a reliable currency converter is a great tool. Today, we'll walk through building one using **Deno 2** and **Appwrite**.
You'll learn how to set up your Deno project using Appwrite, how to fetch and parse JSON data with Deno, and how to use Appwrite Functions to deploy and run your API. By the end, you'll have a working API that converts currencies, provides users with detailed information on how to interact with it, and even utilizes Node.js packages like **Zod** for input validation.
Let's get started.
# Setting up your Deno project
Before we dive into the code, we need to set up our project. If you've already worked with Appwrite before, this will be familiar. If not, don't worry—I'll guide you through it.
There are two ways you can set up a Deno project with Appwrite: through the Appwrite cloud console or using the Appwrite CLI.
## To use the Appwrite Cloud console
1. Go to the [Appwrite dashboard](https://cloud.appwrite.io/).
2. Create a new project if you don't have one already.
3. Navigate to the **Functions** tab and create a new function.
4. Choose **Deno** as the runtime and set up your function configuration.
![Create a new deno function](/images/blog/build-a-currency-converter-with-deno2/deno-create-function.png)
5. Choose where you want your code to live (GitHub or manual deployment), and click **Create** to finish.
6. You can now clone/download your function and start working on it.
If you're developing locally, the [Appwrite CLI](https://appwrite.io/docs/tooling/command-line/installation) is much more convenient. It allows you to set up and manage your projects, functions, and other Appwrite resources without deploying to the cloud after every change.
## To use the Appwrite CLI
First, make sure you have Docker installed and running. If you don't, you can download it from the [official Docker website](https://www.docker.com/products/docker-desktop).
Next, install the **Appwrite CLI** if you haven't already. Open your terminal and run:
```bash
npm install -g appwrite-cli@latest
```
This will install the CLI globally, so you can use the `appwrite` command from anywhere on your machine.
Now, log into your Appwrite account by running:
```bash
appwrite login
```
Once you're logged in, create a new project with:
```bash
appwrite init project
```
# Initializing the Function with Deno
With the project set up, let's create the Deno function that will handle our currency conversion. Appwrite Functions support the Deno runtime out of the box, so it's easy to get started.
To initialize the function, run the following command:
```bash
appwrite init function
```
Appwrite will ask for the name of your function, the ID, and the runtime you want to use. Go ahead and name the function something like `Currency Converter Function` and select **Deno** as the runtime.
**Deno** natively supports TypeScript, so you don't have to worry about going through extra steps to configure your project for TypeScript. Appwrite will generate a basic Deno function for you, including a `main.ts` file in the `src` directory.
You can test the function locally by running:
```bash
appwrite run function
```
This will list the available functions in your project. Select the function you just created, and you'll get a URL where you can access your function locally. You can open this URL in your browser or use `curl` to test the function:
```bash
curl 'your-function-url'
```
# Fetching exchange rates
The first feature we need for our API is fetching the exchange rates. We'll be using the **ExchangeRate API** to do this. It's a free API that provides real-time exchange rates for over 170 currencies. You can sign up for a free API key on their [website](https://www.exchangerate-api.com/).
Once you have your API key, the next step is to create a `.env` file in the root of your function and store the key there. Here's what your `.env` file should look like:
```
EXCHANGE_RATE_API_KEY=your_api_key_here
```
When you later deploy your function, you'll also need to set this environment variable in the Appwrite dashboard.
To add your API key in the Appwrite console:
1. Go to your project -> Functions.
2. Find your function and navigate to the **Settings** tab.
3. Scroll to the **Environment Variables** section and add a new environment variable with the key `EXCHANGE_RATE_API_KEY` and paste your API key as the value.
Once that's done, we can now write the function to fetch exchange rates. Update the `main.ts` file in your function with the following code:
```tsx
async function fetchExchangeRates(): Promise<{ [currency: string]: number }> {
console.log('Fetching exchange rates from API')
const apiKey = Deno.env.get('EXCHANGE_RATE_API_KEY')
const response = await fetch(
`https://v6.exchangerate-api.com/v6/${apiKey}/latest/USD`,
)
const data = await response.json()
if (data.result === 'success') {
return data.conversion_rates
} else {
throw new Error('Failed to fetch exchange rates')
}
}
```
What's happening here is straightforward. We're using `Deno.env.get()` to access the API key stored in our environment variables. This ensures the key isn't exposed in the code, which is good practice for security.
Then, we use Deno's built-in `fetch` function to call the ExchangeRate API. Unlike in Node.js, where you'd need to install a library like `node-fetch` to make HTTP requests, Deno includes this functionality natively, making things a little simpler. After fetching the data, we check whether the request was successful, and if it was, we return the exchange rates. If something goes wrong, we throw an error to signal the issue.
# Converting the currency
Now that we have a way to fetch exchange rates, the next step is to convert between two currencies. For the conversion logic, we'll take an amount, the currency to convert from, and the currency to convert to, and use the exchange rates to calculate the result.
Here's the code to handle that:
```tsx
async function convertCurrency(
amount: number,
from: string,
to: string,
): Promise<{
amount: number
from: string
to: string
result: number
rate: number
timestamp: string
}> {
const rates = await fetchExchangeRates()
if (!(from in rates)) {
throw new Error(`Currency not found: ${from}`)
}
if (!(to in rates)) {
throw new Error(`Currency not found: ${to}`)
}
const fromRate = rates[from]
const toRate = rates[to]
const conversionRate = toRate / fromRate
const result = amount * conversionRate
return {
amount,
from,
to,
result: Number(result.toFixed(2)),
rate: Number(conversionRate.toFixed(6)),
timestamp: new Date().toISOString(),
}
}
```
This function uses the rates we fetched earlier to calculate the conversion. First, it checks whether the `from` and `to` currencies exist in the list of rates. If either currency is missing, we throw an error. Then, we calculate the conversion rate by dividing the `to` currency rate by the `from` currency rate. Finally, we multiply the amount by this conversion rate to get the converted amount, and return the result along with some additional information like the rate and timestamp.
# Validating user input with Zod
It's important to make sure that the data coming into our API is valid. For example, we don't want users sending negative amounts or invalid currency codes. To handle this, we'll use **Zod**, a TypeScript-first schema validation library. The great thing about **Deno 2** is that it allows us to easily use Node.js packages like Zod without extra configuration.
Let's first import Zod and define our validation schema:
```tsx
import { z } from 'npm:zod@3.21.4'
const CurrencyCodeSchema = z
.string()
.length(3)
.transform((val) => val.toUpperCase())
const ConversionRequestSchema = z.object({
amount: z.number().positive(),
from: CurrencyCodeSchema,
to: CurrencyCodeSchema,
})
```
Here, `CurrencyCodeSchema` ensures that the currency code is a string of exactly three characters and automatically converts it to uppercase. `ConversionRequestSchema` ensures that the amount is a positive number and that both the `from` and `to` currencies are valid according to `CurrencyCodeSchema`.
Using Zod in this way ensures that the data entering our API is clean and valid before we try to process it. Catching bad input early can prevent many headaches later.
# Handling API requests
Now that we've validated the user input and written the conversion logic, we need to handle incoming requests to our API. We'll create two main routes:
- **`/convert`**: To convert an amount from one currency to another.
- **`/currencies`**: To list all available currencies.
We'll also create a default route that provides some basic information about how to use the API.
## Handling currency conversion requests (`/convert`)
Let's start with the `/convert` endpoint. This is where users will send a request to convert an amount from one currency to another. The request needs to include three query parameters: `amount`, `from` (the currency to convert from), and `to` (the currency to convert to).
Add the following code to your function:
```tsx
if (req.path === '/convert' && req.method === 'GET') {
try {
const validatedInput = ConversionRequestSchema.parse({
amount: Number(req.query.amount),
from: req.query.from,
to: req.query.to,
})
const conversionResult = await convertCurrency(
validatedInput.amount,
validatedInput.from,
validatedInput.to,
)
return res.json(conversionResult)
} catch (err) {
if (err instanceof z.ZodError) {
return res.json({ error: err.errors }, 400)
}
error('Conversion failed:', err)
return res.json({ error: 'Conversion failed' }, 500)
}
}
```
Let's break down what's happening here:
1. **Checking the request path and method**: We first check if the request path is `/convert` and if it's a GET request. This ensures that we're only processing valid requests.
2. **Validating input**: We use the `ConversionRequestSchema` (which we defined earlier using Zod) to validate the incoming data. The schema ensures that:
- `amount` is a positive number.
- `from` and `to` are valid 3-letter currency codes.
If the validation fails, we return a 400 error with detailed validation errors.
3. **Performing the conversion**: If the input is valid, we call the `convertCurrency()` function with the validated data. This function handles the actual conversion logic using the exchange rates we fetched earlier.
4. **Returning the result**: Once the conversion is successful, we send the result back to the user in JSON format. If any errors occur during the process (e.g., invalid currency codes or issues with the conversion), we log the error and return a generic "conversion failed" message with a 500 status code.
This way, we're ensuring that invalid input is caught early and that the user gets meaningful feedback on what went wrong.
## Handling available currencies requests (`/currencies`)
Next, let's handle the `/currencies` endpoint. This will return a list of all available currencies that our API supports. It's a simple GET request, and we'll use the exchange rates we fetched earlier to get the list of currencies.
In your function, add the following code:
```tsx
else if (req.path === '/currencies' && req.method === 'GET') {
try {
const currencies = await getAvailableCurrencies();
return res.json(currencies);
} catch (err) {
error('Failed to fetch currencies:', err);
return res.json({ error: 'Failed to fetch currencies' }, 500);
}
}
```
Let's walk through it:
1. **Checking the request path and method**: Similar to the `/convert` endpoint, we check if the request path is `/currencies` and if it's a GET request.
2. **Fetching currencies**: We call the `getAvailableCurrencies()` function, which returns a list of all the supported currency codes based on the exchange rates.
3. **Handling errors**: If something goes wrong (e.g., issues fetching the exchange rates), we catch the error, log it, and return a 500 error message indicating that the currencies couldn't be fetched.
If everything works fine, the user will receive a list of available currencies in JSON format.
## Providing a default response
Lastly, we'll handle any requests that don't match `/convert` or `/currencies`. Instead of just returning a 404 error, it's helpful to give users a bit more information about how to use the API.
Add this to your function:
```tsx
else {
return res.json({
message: 'Welcome to the Currency Converter API.',
endpoints: {
'/convert': 'GET - Convert currency. Parameters: amount, from, to',
'/currencies': 'GET - List available currencies',
},
examples: {
convertCurrency: 'http://localhost:3000/convert?amount=100&from=USD&to=EUR',
listCurrencies: 'http://localhost:3000/currencies',
},
}, 200);
}
```
This is a simple but effective way to guide users. Here's what it does:
1. **Providing a welcome message**: The default response includes a message explaining that the API is a currency converter.
2. **Listing available endpoints**: We provide information about the two main endpoints (`/convert` and `/currencies`), along with a description of what each endpoint does.
3. **Giving examples**: To make it easier for users, we also provide example URLs they can use to test the API. This helps users understand how to format their requests correctly.
Note that you have to change `localhost:3000` to the URL of your Appwrite function or deployed API.
# Wrapping it all together
- The `/convert` endpoint handles currency conversions and validates user input to prevent errors.
- The `/currencies` endpoint provides a list of supported currencies.
- The default route offers helpful information and examples for users who may not know how to interact with the API.
With that, the API is complete and ready to be tested locally or deployed to your Appwrite instance.
# Testing your Deno API
Now that the API is ready, let's test it locally. If you already have your function running locally, you can test it by sending requests to the endpoints we created. If not, you can start the function by running:
```bash
appwrite run function
```
This will spin up your function and give you a URL where you can access it locally. Open this URL in your browser or use `curl` to test the API.
For example, to convert 100 USD to EUR, you can run:
```bash
curl 'http://localhost:3000/convert?amount=100&from=USD&to=EUR'
```
To list all available currencies, use:
```bash
curl 'http://localhost:3000/currencies'
```
Remember to change `localhost:3000` to the URL of your function if you're running it on a different port.
# Deploying your Deno API
Once you're happy with how the API works locally, you can deploy it to your Appwrite cloud project. This will make it accessible to anyone who has the URL. To deploy your function, run:
```bash
appwrite push function
```
You'll get a response asking you to choose the function you want to deploy. Select the function you just created, and Appwrite will deploy it to your cloud project. You can view the function in the Appwrite dashboard and access it using the provided URL.
![Function overview page](/images/blog/build-a-currency-converter-with-deno2/function-overview-page.png)
# Wrapping up
Congrats on building a working currency converter API using **Appwrite Functions** and **Deno 2**. Along the way, we explored how Deno's built-in TypeScript support and its straightforward integration with Node.js packages like Zod make the development process easier. You also saw how Appwrite's Functions allow you to test everything locally, ensuring smooth deployment when you're ready.
This project should give you a solid foundation for building more complex APIs in the future. If you're looking to expand this, you could add features like more advanced error handling, [authentication](https://appwrite.io/docs/products/auth) and user search history, or even integrate real-time updates for currency rates.
Feel free to experiment with and extend this API however you see fit. If you have any questions or run into issues, reach out to us on the [Appwrite Discord server](https://appwrite.io/discord) or send me a message on [LinkedIn](https://www.linkedin.com/in/ebenezerdon/).
# Further reading
- [Deno 2.0 and what it means for Appwrite Functions](https://appwrite.io/blog/post/deno-2-appwrite-functions)
- [Local serverless function development with the new Appwrite CLI](https://appwrite.io/blog/post/functions-local-development-guide)
- [Building a chat app with Appwrite and Google Gemini](https://appwrite.io/blog/post/build-a-chat-app-with-appwrite-and-gemini)

View File

@@ -0,0 +1,101 @@
---
layout: post
title: Custom backup policies for compliance and security
description: Learn why regular backups are no longer enough for data security.
date: 2024-10-16
cover: /images/blog/custom-backup-policy/cover.png
timeToRead: 5
author: aditya-oberai
category: product
---
If you're still relying on standard backups with just a 7-day retention, your company's data could be at risk. Today, data security is non-negotiable, regardless of your industry or business size. Losing your data can lead to massive financial, legal, and reputational setbacks.
That's why having a custom backup policy is crucial.
In this post, we'll explore the top reasons why you should consider creating a custom backup policy and how to set up a policy that works for you.
# Data prioritization & backup frequency
Not all data is created equal. Some files or databases are mission-critical and need to be backed up frequently, while others may not require the same level of urgency. A business that processes thousands of transactions per day will require frequent backups (e.g., hourly or real-time), whereas a small design agency might only need daily or weekly backups. Critical information, such as financial transactions or customer records, may need multiple backups throughout the day, while less essential data (like system logs) might only require periodic backups.
Custom backup policies allow you to prioritize critical data, reducing unnecessary backups and optimizing resource use and costs.
A **custom backup policy** lets you prioritize the data that matters most to your operations:
- **Mission-critical data**: Backed up frequently (e.g., customer transactions, financial records).
- **Moderately important data**: Backed up less often (e.g., project files, internal reports).
- **Low-priority data**: Backed up sparingly or archived (e.g., old emails, obsolete files).
By categorizing your data this way, you can ensure the most important assets are always safe, while reducing storage costs and improving backup speed.
# Compliance and industry regulations
Many industries, such as finance and healthcare, have strict compliance requirements regarding data retention. Depending on the jurisdiction and regulations, organizations may need to retain data for months or even years. A simple daily backup with a 7-day retention period would be insufficient to meet these legal requirements. Custom backup policies allow a company to comply with regulations by offering tailored backup schedules and retention periods.
Depending on your industry, there may be specific regulations that dictate how you store, back up, and protect data. For example:
- **Healthcare** organizations must comply with HIPAA regulations.
- **Financial institutions** are subject to stringent data retention and backup requirements under laws like GDPR or SOX.
**Custom backup policies** allow you to design a strategy that meets these legal requirements, ensuring that you're not only protecting your data but also staying compliant. You can set up custom retention periods, encryption protocols, and access controls specific to your industry's needs, which generic solutions may not provide.
# Business continuity and faster recovery times
Custom backup policies are crucial for maintaining business continuity in the event of a disaster. Different applications and data types need specific recovery plans, which generic solutions might not support. Tailored backup strategies ensure quick recovery times and minimize downtime, especially when historical data is needed beyond the limits of a basic daily backup.
Recovery Time Objective (RTO) and Recovery Point Objective (RPO) are two critical metrics when it comes to data recovery. These define:
- **RTO**: How quickly you need to restore data after an incident.
- **RPO**: How much data you can afford to lose (measured in time) from the moment of the last backup.
With a custom backup policy, you can define RTO and RPO based on the nature of your operations:
- For mission-critical systems, you may want an RPO of a few minutes and an RTO of near-instant recovery.
- For less essential systems, longer RPOs and RTOs might be acceptable.
Custom policies allow you to fine-tune these settings, ensuring you're not paying for more than you need or risking extended downtime that could disrupt business.
# Cost efficiency
Generic backup solutions often come with rigid pricing structures that can end up costing more than necessary, especially if you're backing up more data than needed or storing it for longer than required. Custom backup policies are essential for managing storage costs efficiently. By implementing incremental or differential backups rather than full daily backups, you can reduce storage requirements, especially in cloud environments where costs can escalate quickly. Tailoring retention periods based on data importance also helps manage overall backup expenses.
By creating a custom backup policy, you can:
- **Optimize storage** by setting up differential or incremental backups, which save only changes made since the last backup rather than duplicating everything.
- **Set specific retention policies** so that data is not held longer than required, avoiding bloated storage costs.
- **Use different backup locations** (cloud, local, offsite) depending on the data's importance and recovery requirements.
This ensures you're making the most of your budget, paying for exactly what you need, and not a penny more.
# Security and encryption
Data security is non-negotiable, especially in an age where cyber threats are on the rise. While many generic backup services offer basic security features, a custom backup policy allows you to take control over:
- **Encryption protocols** (e.g., encrypting data at rest and in transit).
- **Access control policies**, defining who can access backups and under what circumstances.
- **Geographical storage preferences**, ensuring sensitive data is stored in locations that meet your security standards or regulatory requirements.
This heightened level of control allows you to protect your data against unauthorized access, breaches, or even insider threats.
# Scalability
As businesses grow, so do their data needs. Custom backup policies provide the flexibility to scale with your organization, adapting to new data sources, applications, and compliance requirements. Additionally, they allow companies to integrate new technologies and adjust backup strategies as business processes evolve, ensuring continued data protection without being constrained by rigid systems. You might need more frequent backups, larger storage capacities, or even different security measures as your data grows in both volume and complexity.
With a tailored solution, you can adjust:
- **Backup schedules** based on changing workloads.
- **Storage capacities** to handle growing data volumes.
- **Security measures** to protect increasingly sensitive information.
This flexibility ensures your backup policy grows with your business, keeping your data protected every step of the way.
# Conclusion
A **custom backup policy** is a smart investment.
By tailoring your backup approach to your business needs, you're not just safeguarding your data — you're optimizing resources, ensuring compliance, and future-proofing your business.
Appwrite Database Backups allows you to customize your backup policy down to pre-defined schedules (e.g., daily, weekly, monthly), retention periods and even precise execution times on Pro and Scale plans. Learn more about Database Backups:
- [Appwrite Database Backups](https://appwrite.io/blog/post/introducing-database-backups)
- [Database Backups docs](https://appwrite.io/docs/products/databases/backups)
- [Should I encrypt my backups?](https://appwrite.io/blog/post/backup-encryption)

View File

@@ -0,0 +1,204 @@
---
layout: post
title: "Flutter vs React Native: Which framework is best for your app in 2024?"
description: Learn how to choose between Flutter and React Native for your next mobile app development project.
date: 2024-10-25
cover: /images/blog/flutter-vs-react-native/cover.png
timeToRead: 8
author: ebenezer-don
category: product
---
Choosing between **Flutter** and **React Native** for mobile app development is more than just comparing features. Each framework comes with its own strengths, limitations, and unique use cases, making the decision impactful in several ways. This choice affects:
- Your app's long-term performance
- Ease of maintenance
- Speed of development
Let's break down the practical differences between these two frameworks and discuss some key factors you should consider as a developer.
# What's cross-platform development about?
The core purpose of both Flutter and React Native is to solve a common problem in mobile development: managing two separate codebases for iOS and Android. Developing native apps has long been the gold standard for performance and user experience. However, the traditional native development process has some significant downsides:
- **Increased cost**: You have to maintain two separate codebases.
- **Complicated workflows**: Teams need to duplicate their efforts.
- **Feature roll-out delays**: Releasing updates across platforms can be slow.
## Early solutions and their shortcomings
Before Flutter and React Native, there were early cross-platform solutions like **Cordova** and **Ionic**. These frameworks used web technologies to create mobile apps, but they often fell short in two key areas:
- **Performance**: Web-based solutions couldn't match the speed and responsiveness of native apps.
- **User Experience**: Web technology layers made the apps feel sluggish and less polished.
These shortcomings opened the door for modern cross-platform solutions, like Flutter and React Native, which promise:
- **Native-like performance** without maintaining two codebases
- **Faster development** times by sharing most of the codebase between platforms
However, both frameworks introduce their own sets of challenges, and understanding them is key to making the right choice.
## The appeal of cross-platform development
Cross-platform frameworks like Flutter and React Native provide several attractive benefits:
- **Cost-efficiency**: A unified codebase means lower development costs.
- **Faster time to market**: Since the code is shared between platforms, you can roll out updates quicker.
- **Consistency**: Design consistency across platforms becomes easier to achieve.
However, despite these benefits, the **reality of cross-platform development** is more complex, and each framework introduces specific trade-offs. Understanding where each excels and where each struggles will help you make an informed choice.
# Choosing between Dart and JavaScript
One of the primary differences between Flutter and React Native is the programming language each uses. This choice of language significantly impacts how easy it is to onboard developers and what the long-term performance of your app will be.
- **Flutter** uses **Dart**, a relatively newer language developed by Google.
- **React Native** relies on **JavaScript**, one of the most widely used languages in web development.
## Dart (used by Flutter)
**Dart** comes with several notable benefits:
- **Ahead-of-time (AOT) compilation**: Dart compiles your code into native machine code ahead of time. This allows Flutter apps to start quickly and run efficiently, which is especially important for data-intensive apps or apps with complex animations.
- **Just-in-Time (JIT) compilation**: During development, Dart uses JIT, allowing for fast development cycles through **hot reloads**. You can see changes in your app almost instantly without restarting.
Dart's ability to compile directly into native code can significantly improve performance, especially for apps that:
- Handle a lot of data
- Use complex animations
- Require high responsiveness even under heavy loads
**Drawbacks of Dart**:
- **Learning curve**: Dart isn't as widely adopted as JavaScript, so development teams familiar with JavaScript will face a learning curve. This could slow down the initial phase of your project as developers become familiar with Dart.
## JavaScript (used by React Native)
JavaScript's main advantage is its widespread use. Most development teams are already familiar with it, which makes **React Native** a logical choice for teams with JavaScript experience.
- **Widespread adoption**: JavaScript is used in a variety of development environments, making it easier to find developers with the necessary skills.
- **Easy onboarding**: Teams already working with **React** on the web will find the transition to React Native relatively straightforward.
**Challenges with JavaScript**:
- **Performance bottlenecks**: React Native uses a **JavaScript bridge** to communicate with native components. This introduces some overhead, especially for complex user interfaces or animations. The need to pass through this bridge can slow down performance.
- **Optimization efforts**: React Native introduced the **Hermes engine** to improve startup times and optimize JavaScript execution. While this engine helps, heavy apps with many complex interactions may still require additional performance tuning.
## Key takeaways on languages
- **Go with Dart (Flutter)** if performance is a top priority and you're willing to invest in learning a new language. Dart's ability to compile into native code gives it an advantage in apps with complex functionality.
- **Choose JavaScript (React Native)** if you need to quickly get up to speed and are looking for ease of use with existing JavaScript knowledge. It's also ideal for rapid prototyping.
# **UI Control: What level of customization do you need?**
Another significant difference between Flutter and React Native is how each framework handles the **user interface (UI)**. UI design plays an important role in how users interact with your app, and each framework offers different levels of control and customization.
## Flutter's approach to UI: Impeller rendering engine
- Flutter uses the **Impeller rendering engine** by default on iOS, optimizing for smoother animations and reduced "jank" during interactions.
- Flutter provides **Material** and **Cupertino widgets** that align with Android and iOS styles, minimizing the need to mimic native behavior while allowing for a consistent UI across platforms.
This control is particularly valuable for:
- Apps that require **custom designs** or branding that doesn't conform to standard UI elements.
- Apps with **unique layouts** or complex animations that need fine-tuning.
**Downsides** of Flutter's UI approach:
- Since Flutter doesn't use native UI components directly, the app may not feel as “native” as a React Native app.
- Flutter provides **widgets** that mimic native components, but these aren't identical, and users might notice slight differences in look and behavior.
## React Native's approach to UI: Native components
- React Native uses **actual native components** to render the UI.
- On iOS, buttons are actual **UIButtons**.
- On Android, buttons are native **Buttons**.
This ensures that React Native apps feel more **integrated with the platform** and deliver a more familiar experience for users.
**Advantages** of this approach:
- Apps feel **native** from the start.
- You don't need to mimic native behavior; React Native handles that for you.
**Drawbacks**:
- For highly customized UIs, you may need to build **custom components** or use third-party libraries. This adds complexity to the project.
- The reliance on native components can sometimes limit how much control you have over the design.
## Key takeaways on UI customization
- **Flutter** offers more customization control but sacrifices some native feel.
- **React Native** provides a more natural native experience but limits your ability to customize UI elements without additional effort.
# **Ecosystem and libraries**
Both Flutter and React Native have strong ecosystems, but each has its own strengths and limitations.
## Flutter's ecosystem
- Flutter's ecosystem is **growing quickly**, supported by Google. Many packages for core functionalities, like state management and navigation, are available through **pub.dev** and receive frequent updates.
- However, since Flutter is relatively new compared to React Native, there may be **gaps in the ecosystem**, particularly for niche functionalities.
- In some cases, you might need to write custom solutions or wait for the ecosystem to catch up.
## React Native's ecosystem
- React Native benefits from the vast **npm ecosystem**, which has been around for much longer.
- You'll find packages for almost anything you need, from authentication to third-party API integration.
- **Fragmentation** can be an issue, as some packages are not actively maintained or may not work well with the latest versions of React Native.
- It can be challenging if you rely on a package that is no longer supported.
## Key takeaways on ecosystem
- React Native's larger ecosystem gives you more options but comes with the risk of fragmentation.
- Flutter's ecosystem is newer but growing rapidly, with high-quality packages backed by strong community and corporate support.
# **Development experience**
Both frameworks provide a smooth development experience, but their approaches differ slightly.
## Flutter's development experience
- **Hot reload** allows you to see changes instantly without restarting the app.
- Flutter has **excellent integration** with popular IDEs like Visual Studio Code and Android Studio.
- Debugging tools are robust, making development smooth and efficient.
## React Native's development experience
- **Hot reloading** is also available, making it easy to see changes as you code.
- For web developers familiar with **React**, the transition to React Native feels natural, thanks to shared syntax and concepts.
- However, **native code** may need to be accessed more often to optimize performance, which can slow down development if you're not familiar with the native platforms.
## Key takeaways on development
- **Flutter** offers a more integrated experience with tighter IDE support.
- **React Native** is easier for web developers but may require more native platform knowledge for optimization.
# Comparison table: Flutter vs React Native
| **Aspect** | **Flutter** | **React Native** |
|--------------------------|-------------------------------------------|------------------------------------------------------|
| **Programming Language** | Dart | JavaScript |
| **Compilation** | Ahead-of-Time (AOT), Just-in-Time (JIT) | JavaScript Bridge (with Hermes engine) |
| **Performance** | High due to native code compilation | Slower due to JavaScript bridge overhead |
| **UI Customization** | Full control (Impeller rendering engine on iOS) | Native UI components |
| **Learning Curve** | Higher (Dart is less common) | Lower (JavaScript is widely used) |
| **Ecosystem** | Growing, high-quality packages on pub.dev | Large but fragmented npm ecosystem |
| **Development Tools** | Strong IDE integration, robust debugging | Good tools, but native code often needed |
| **Hot Reload** | Yes | Yes |
| **Best for** | High performance, custom UI | Fast time-to-market, leveraging JS skills |
# Conclusion
The choice between Flutter and React Native depends on your specific project requirements and the trade-offs you're willing to make.
- **Go with Flutter** if performance is a top priority or if your app requires a custom, polished user interface. Flutter's ability to compile directly to native code gives it a performance advantage that React Native might struggle to match without significant optimizations.
- **Choose React Native** if you want to leverage your team's existing JavaScript knowledge, get to market quickly, or build an app that relies heavily on native UI components. React Native allows for fast development with a wide range of packages and tools, though it may need additional performance tuning for complex apps.
# More resources
- [Get started with Appwrite Realtime for Flutter](http://appwrite.io/blog/post/appwrite-realtime-for-flutter)
- [How to build cross-platform applications with React Native](https://appwrite.io/blog/post/building-cross-platform-applications-with-react-native)
- [SSR vs CSR with Next.js](https://appwrite.io/blog/post/csr-vs-ssr-with-nextjs)

View File

@@ -6,7 +6,7 @@ date: 2024-03-22
cover: /images/blog/goodbye-plaintext-passwords/cover.png
timeToRead: 6
author: aditya-oberai
category: authentication
category: security
---
Recently, we came across a report by [BleepingComputer](https://www.bleepingcomputer.com/news/security/misconfigured-firebase-instances-leaked-19-million-plaintext-passwords/), which shared how misconfigured Firebase projects led to the leakage of 19 million plaintext passwords on the public internet. This was primarily caused by missing or incorrectly configured security rules on Firebase instances that consequently permitted read access to databases, resulting in a massive data leak that exposed:

View File

@@ -0,0 +1,128 @@
---
layout: post
title: "A modern developer's guide to user authentication"
description: Explore the essentials of user authentication with tips, best practices and common pitfalls to secure your app and improve user experience.
date: 2024-10-25
cover: /images/blog/user-authentication-guide/cover.png
timeToRead: 6
author: aditya-oberai
category: security
---
User authentication is a fundamental process that ensures only authorized individuals can access specific systems, applications, or data. Every time you log in to your email, bank account, or social media, you're going through this process.
As a developer, setting up user authentication properly is crucial to maintaining security and trust in your application. But authentication is more than just checking passwords — it's about balancing security, usability, and scalability.
So, what should you know when implementing user authentication? Let's break it down.
# The basics of user authentication
At its core, **authentication** verifies a user's identity by comparing the credentials they provide (such as a username and password) with a trusted data source. The goal is simple: ensure the user is who they say they are. But how you go about implementing this can range from basic to highly sophisticated.
The foundation of any authentication system relies on **secure handling of credentials** and ensuring that the process is both reliable and user-friendly. Without a strong authentication system, your app becomes an easy target for attacks.
# Authentication vs. authorization: what's the difference?
Authentication and **authorization** are often confused, but they serve distinct roles in application security.
- **Authentication** answers the question *"Who are you?"* It's the process of verifying the user's identity (e.g., logging in with a username and password or using an OAuth token).
- **Authorization** answers the question *"What are you allowed to do?"* Once the user is authenticated, authorization defines their permissions and access to resources (e.g., roles determining which API endpoints or data they can interact with).
Think of it like this: **authentication** is about getting the user through the front door (proving their identity), while **authorization** decides which areas of the building they can access (determining their level of permissions based on roles, policies, etc.).
# Common types of user authentication
There are several ways systems can authenticate users, ranging from the familiar to the more advanced.
## Password-based authentication
This is the most traditional form of authentication, where users submit a password that's compared against a hashed version stored in your database. However, passwords are vulnerable to attacks like brute force or credential stuffing, especially if users choose weak or reused passwords. This is why password-based authentication alone is no longer considered sufficient in many modern apps.
## Two-factor authentication (2FA)
2FA adds an extra layer of security beyond the password by requiring a second form of verification, typically a one-time code sent via SMS, email, or an authenticator app. As a developer, implementing 2FA means integrating libraries or services that handle generating and validating these codes, or relying on APIs. By adding this "second bouncer," you significantly reduce the risk of unauthorized access, even if the user's password is compromised.
## Biometric authentication
Biometric methods like fingerprint scanning, facial recognition, or voice ID are increasingly common, especially with mobile devices. As a developer, you can integrate biometric authentication through native device APIs (e.g., Android's Fingerprint API, iOS Face ID) or cross-platform libraries like WebAuthn for web apps. Biometric data isn't stored as raw images but as encrypted representations, adding complexity but enhancing security. Since it's hard for attackers to replicate physical traits, this method offers a strong, user-friendly alternative to passwords.
## Magic links
Magic links are a convenient passwordless authentication method, where users receive a link via email to log in. Clicking the link verifies their identity, bypassing the need for a password altogether. This method can be particularly user-friendly since it reduces password fatigue and makes the login process seamless. As a developer, implementing magic links requires setting up a secure link generator and handling token expiration to prevent unauthorized access if the link is compromised.
## OTP SMS method
With [OTP (One-Time Password) SMS](https://appwrite.io/blog/post/should-you-stop-using-otp-sms), users receive a temporary numeric code sent to their mobile device via SMS, which they enter to authenticate. This method is popular due to its simplicity and accessibility for users. To set it up, developers can leverage SMS APIs, such as Twilio, to automate OTP generation and delivery. However, ensure that codes expire quickly and avoid SMS for sensitive applications if possible, as it can be vulnerable to SIM swapping attacks.
# 7 best practices for great user authentication
Here are the key components you should consider for an outstanding and secure user authentication experience.
**Password security**:
- **Password hashing**: Never store plain-text passwords! Instead, use strong hashing algorithms like bcrypt, Argon2, or PBKDF2 to store password hashes. [Password hashing](https://appwrite.io/blog/post/password-hashing-algorithms) is essential to prevent common password attacks like rainbow table attacks.
- **Password policies**: Implement strong [password policies](https://appwrite.io/blog/post/password-protection), but avoid being too restrictive. Encourage users to create complex passwords without making them impossible to remember. For example, consider enforcing a minimum length but allow passphrases for easier memorability.
**Rate limiting and brute-force protection**:
- To prevent brute-force attacks, where an attacker repeatedly guesses passwords, implement **rate limiting** or **login throttling**. After a few failed login attempts, you can introduce a cooldown period or lock the account temporarily.
- Integrate tools like CAPTCHA for additional protection after multiple failed login attempts, ensuring you aren't sacrificing UX for security.
**Two-factor authentication (2FA)**:
- One of the easiest ways to add an extra layer of security to your app is by implementing **2FA**. This can involve sending a code via SMS, email, or using an authenticator app (e.g., Google Authenticator or Authy).
- When building 2FA, make sure to give users multiple backup options (e.g., recovery codes) in case they lose access to their second factor.
**OAuth 2.0 and OpenID Connect**:
- If you're looking to offload the complexities of authentication, consider using **OAuth 2.0** or **OpenID Connect** to enable users to log in with external providers like Google, Apple, or GitHub. [Appwrite's Auth API](https://appwrite.io/docs/products/auth/oauth2) makes setting up OAuth2 quick and seamless.
- **JWT (JSON Web Tokens)** are commonly used in OAuth/OIDC to pass user claims and permissions between services. JWTs allow for stateless authentication, but remember: JWTs should be **signed** to prevent tampering and **encrypted** if sensitive data is involved.
**Single Sign-On (SSO)**:
- **SSO** allows users to authenticate once and gain access to multiple applications. For organizations, this simplifies user management and enhances security.
- Implementing SSO can greatly reduce user password fatigue and the need for maintaining separate credentials for each app. Just ensure your SSO provider is configured with the correct security settings, such as enforcing 2FA.
**Session management**:
- **Session tokens** vs. **JWTs**: You'll need to decide between traditional session tokens stored on the server (usually in a database) or JWTs, which are stateless and stored on the client side. Each has pros and cons.
- For traditional session-based authentication, store session IDs securely in HTTP-only cookies. For JWTs, ensure they are signed and, ideally, encrypted, and never store them in localStorage or expose them in JavaScript to prevent XSS attacks.
- Consider implementing **automatic session expiration** or idle timeouts to improve security. Regularly rotate and invalidate tokens to prevent session hijacking.
**Secure password reset mechanisms**:
- Password reset functionality is often an easy target for attackers, so make sure the process is secure.
- When sending password reset links, ensure that the link expires after a short time and can only be used once. Also, avoid revealing whether the email entered exists in your system to prevent enumeration attacks.
# Mistakes to avoid when setting up user authentication
## **Weak password policies**
Weak passwords are a top vulnerability in authentication security. Enforce strong password policies, requiring a mix of uppercase, lowercase, numbers, and special characters, along with a minimum character length. Additionally, consider supporting multi-factor authentication (MFA) for an extra layer of security, which helps reduce the risk of unauthorized access even if passwords are compromised.
## **Storing plaintext passwords**
Storing passwords in plaintext is highly insecure. Always hash passwords before storing them, using strong hashing algorithms like **bcrypt** or **argon2**, which are designed specifically for password security. Avoid older, weaker hashing algorithms like MD5 or SHA-1, which are vulnerable to attacks. Additionally, apply a unique salt to each password to further protect against rainbow table attacks.
## **Not validating tokens properly**
Tokens, especially JWTs (JSON Web Tokens), are a popular choice in modern authentication. However, failing to validate token expiration or signature can expose applications to token-related vulnerabilities. Always check the token's expiration date and validate the signature against your secret key to ensure authenticity. Rotate tokens periodically and, when possible, implement short expiration periods with refresh tokens to maintain security.
## **Using insecure third-party authentication providers**
While third-party providers like OAuth, OpenID Connect, or SAML simplify authentication, they must be secure and reputable. Choose providers that follow industry-standard security protocols and regularly audit their security measures. Configure scopes appropriately, granting only necessary permissions. Test third-party integrations to ensure they follow security best practices and update configurations to remain aligned with current standards.
# Final thoughts
User authentication is the first line of defense in safeguarding digital identities, and users truly value the peace of mind that comes from knowing their data is secure. As a developer, implementing proper authentication protects both your users and your app from a wide range of threats.
However, keeping up with evolving threats takes time, and building a secure authentication system from scratch might not be feasible, especially if you're also responsible for the rest of your backend.
That's why it's often better to trust established security experts rather than reinvent the wheel. Appwrite's Auth service can help you set up multiple secure authentication methods — including OAuth, email/password, magic links, and OTP — in just a few lines of code. This way, you ensure that your users are protected by industry standards without having to maintain the entire system yourself. More resources:
- [Auth docs](https://appwrite.io/docs/products/auth)
- [How to set up Sign in with Apple](https://appwrite.io/blog/post/how-to-set-up-sign-in-with-apple)
- [How to set up Google authentication in React with Appwrite](https://appwrite.io/blog/post/set-up-google-auth-appwrite-react)
- [How to implement GitHub sign-in with Appwrite](https://appwrite.io/blog/post/implement-sign-in-with-github)
- [Password protection for developers: best practices](https://appwrite.io/blog/post/password-protection)
- [Say goodbye to plaintext passwords](https://appwrite.io/blog/post/goodbye-plaintext-passwords)
- [Should you stop using OTP SMS now?](https://appwrite.io/blog/post/should-you-stop-using-otp-sms)

View File

@@ -78,13 +78,13 @@
<div class="web-big-padding-section">
<div id="form" class="web-big-padding-section-level-1 u-padding-0 u-overflow-hidden">
<div
class="web-big-padding-section-level-2 is-margin-replace-padding u-position-relative"
class="web-big-padding-section-level-2 is-margin-replace-padding relative"
>
<div class="u-position-relative">
<div class="relative">
<div class="web-container relative">
<!-- before submit -->
<div
class="u-position-relative u-z-index-1 web-grid-1-1-opt-2 u-gap-32 e-u-row-gap-0"
class="relative u-z-index-1 web-grid-1-1-opt-2 u-gap-32 e-u-row-gap-0"
>
<div>
<div

View File

@@ -24,6 +24,10 @@
label: 'GDPR',
href: '/docs/advanced/security/gdpr'
},
{
label: 'PCI',
href: '/docs/advanced/security/pci'
},
{
label: 'SOC 2',
href: '/docs/advanced/security/soc2'
@@ -33,8 +37,8 @@
href: '/docs/advanced/security/hipaa'
},
{
label: 'PCI',
href: '/docs/advanced/security/pci'
label: 'CCPA',
href: '/docs/advanced/security/ccpa'
}
]
},

View File

@@ -4,7 +4,7 @@ title: Abuse protection
description: Learn how Appwrite protects your apps from abuse through rate limiting and cross-site scripting protection.
---
Appwrite comes packaged with tools to protect against various forms of abuse, like brute force attacks, data scraping, and many
other common forms of abuse.
other common forms of abuse.
# Rate limiting {% #rate-limiting %}
Appwrite uses rate limits on some endpoints to avoid abuse or brute-force attacks against Appwrite's REST API.
@@ -22,7 +22,7 @@ Learn more about rate limits
Appwrite limits who can make requests to Appwrite's APIs by default.
This means that unless your app's domain is added to Appwrite as a platform, requests are rejected.
By being explicit with the domains that are allowed
to make requests to your Appwrite project, requests from JavaScript hosted on unknown domains
to make requests to your Appwrite project, requests from JavaScript hosted on unknown domains
will not be accepted.
You can add new platforms by navigating to **Overview** > **Platforms** > **Add platform**.
@@ -36,6 +36,7 @@ You can add new platforms by navigating to **Overview** > **Platforms** > **Add
# DDoS protection {% #ddos-protection %}
Appwrite Cloud's infrastructure is protected with always-on DDoS protection.
Appwrite's DDoS protection operates in Network (layer 3) and Transport (layer 4) layers.
This protects Appwrite's infrastructure against volumetric attacks such as UDP floods, ICMP floods, TCP floods,
and DNS reflection attacks, as well as protocol-layer attacks such as SYN floods, BGP attacks, and ping-of-death attacks.
Appwrite's DDoS protection operates across multiple layers, including the Network (layer 3), Transport (layer 4), and Application (layer 7) layers.
This comprehensive protection safeguards Appwrite's infrastructure against volumetric attacks such as UDP floods, ICMP floods, TCP floods, and DNS reflection attacks, as well as protocol-layer attacks like SYN floods, BGP attacks, and ping-of-death attacks.
Additionally, we have implemented advanced security rules that monitor traffic patterns to detect and block increased suspicious activity, ensuring the security and stability of your applications.

View File

@@ -43,6 +43,11 @@
{
label: 'Relationships',
href: '/docs/products/databases/relationships'
},
{
label: 'Backups',
new: true,
href: '/docs/products/databases/backups'
}
]
},

View File

@@ -0,0 +1,122 @@
---
layout: article
title: Backups
description: Learn how to efficiently back up your databases on Appwrite Cloud, ensuring data security and seamless recovery.
---
Appwrite Backups enable seamless, **encrypted** database backups on Cloud.
All backups are **hot** backups, ensuring zero downtime and fast recovery.
Learn how to efficiently back up your databases to ensure data security and smooth recovery.
{% info title="Backups are available on Appwrite Cloud for all Pro, Scale, and Enterprise customers." %}
{% /info %}
Appwrite Backups allow you to automate database backups using backup policies, supporting pre-defined, custom retention & other options. You can also create manual backups whenever necessary.
# Backup policies {% #backup-policies %}
Backup policies allow you to automate your backup process. The Scale and Enterprise plans allow for more customization and offer options like how often backups should occur, how long they should be retained, and when they should run.
## Creating a backup policy {% #creating-backup-policy %}
To automate your database backups, you need to create backup policies that run at scheduled intervals.
{% only_dark %}
![Create databases screen](/images/docs/databases/dark/databases.png)
{% /only_dark %}
{% only_light %}
![Create databases screen](/images/docs/databases/databases.png)
{% /only_light %}
1. In the Appwrite Console's sidebar, click **Databases**
2. Create or select & navigate to your database and click on the **Backups** Tab
3. Click on **Create Policies** & select a pre-defined policy\
&nbsp;
* On a **Pro** plan, you get access to a Daily backup policy
{% only_dark %}
![Pro plan policy](/images/docs/databases/dark/pro-policy.png)
{% /only_dark %}
{% only_light %}
![Pro plan policy](/images/docs/databases/pro-policy.png)
{% /only_light %}
* On **Scale** and **Enterprise** plans, you get access to more & custom policies\
&nbsp;
* Select a pre-defined policy
{% only_dark %}
![Scale plan policies](/images/docs/databases/dark/scale-policies.png)
{% /only_dark %}
{% only_light %}
![Scale plan policies](/images/docs/databases/scale-policies.png)
{% /only_light %}
* Or create a custom policy and adjust the settings as you like
{% only_dark %}
![Custom policies for Scale plan](/images/docs/databases/dark/scale-custom-policies.png)
{% /only_dark %}
{% only_light %}
![Custom policies for Scale plan](/images/docs/databases/scale-custom-policies.png)
{% /only_light %}
4. Click on **Create**
Your database is now set up for automated backups with just a few clicks.
Note that you can always navigate to the same tab and click **Create Manual** to create a backup on-demand.
# Manual backups {% #manual-backups %}
You can always create an on-demand backup whenever necessary.
1. In the Appwrite Console's sidebar, click **Databases**
2. Select & navigate to your database and click on the **Backups** Tab
3. Click on **Manual Backup**
Depending on the size of your database, the backup process may take some time to complete.
You can monitor its progress via the floating status bar at the bottom of your screen.
# Restoring backups {% #restoring-backups %}
To restore a database, you must have a backup of the database you want to restore.
{% only_dark %}
![Create databases screen](/images/docs/databases/dark/restore.png)
{% /only_dark %}
{% only_light %}
![Create databases screen](/images/docs/databases/restore.png)
{% /only_light %}
1. In the Appwrite Console's sidebar, click **Databases**
2. Select & navigate to your database and click on the **Backups** Tab
3. Click on the options menu in the far corner of your backup
4. In the dropdown menu, click **Restore**.
5. Enter the new database name and an optional database ID
6. Click **Restore**
Depending on the size of your database, the restoration process may take some time.
You can observe its status in a floating bar across your project.
# Backup security & performance {% #backup-security-and-performance %}
All backups created with Appwrite are:
1. **Encrypted**:
All backups are securely encrypted to ensure your data remains protected at all times.
2. **Remotely stored**:
Backups are stored in a remote location, providing an additional layer of security and ensuring your data is always recoverable.
3. **Hot backups**:
Backups are hot, meaning they occur with zero downtime, allowing you to recover data quickly without interrupting your projects and services.
# Best practices {% #best-practices %}
To ensure your backups are robust and effective, consider the following best practices:
1. **Schedule regular backups**:
Add multiple backup policies based on the frequency of database changes. Daily or weekly backups are often sufficient for most use cases.
2. **Retain critical backups longer**:
Use custom policies with longer retention to keep backups of critical data for extended periods, ensuring historical records are available when needed.
3. **Optimize backup policies based on data sensitivity**:
Tailor your backup frequency and retention settings according to the sensitivity and importance of the data.
Critical data may require more frequent backups, while less essential data can have longer retention and fewer backups.

View File

@@ -10,7 +10,7 @@ APNs will save the last message for 30 days or less and attempt delivery as soon
{% section #add-provider step=1 title="Add provider" %}
To add APNs as a provider, navigate to **Messaging** > **Providers** > {% icon icon="plus" size="m" /%} **Add provider** > **Push notification**.
To add APNs as a provider, navigate to **Messaging** > **Providers** > {% icon icon="plus" size="m" /%} **Create provider** > **Push notification**.
{% only_dark %}
![Add a FCM provider](/images/docs/messaging/providers/apns/dark/provider.png)

View File

@@ -330,7 +330,7 @@ func messaging(
}
}
}
```
Since the token is saved in `UserDefaults`, you can access it from anywhere in your app.
With this saved `fcmToken`, you can create a push target with Appwrite when the user logs in.
Each push target is associated with an account; here's an example with an email and password login.

View File

@@ -135,17 +135,17 @@ Get started with Appwrite and Angular
Learn to use Appwrite by building an idea tracker app.
{% cards %}
{% cards_item href="/docs/quick-starts/react" title="React" %}
{% cards_item href="/docs/tutorials/react" title="React" %}
Get started with Appwrite and React
{% /cards_item %}
{% cards_item href="/docs/quick-starts/vue" title="Vue.js" %}
{% cards_item href="/docs/tutorials/vue" title="Vue.js" %}
Get started with Appwrite and Vue.js
{% /cards_item %}
{% cards_item href="/docs/quick-starts/nuxt" title="Nuxt" %}
{% cards_item href="/docs/tutorials/nuxt" title="Nuxt" %}
Get started with Appwrite and Nuxt
{% /cards_item %}
{% cards_item href="/docs/quick-starts/sveltekit" title="SvelteKit" %}
{% cards_item href="/docs/tutorials/sveltekit" title="SvelteKit" %}
Get started with Appwrite and SvelteKit
{% /cards_item %}
{% /cards %}
{% /section %}
{% /section %}

View File

@@ -86,7 +86,7 @@ In `src/App.jsx`, wrap the `main` element with the `IdeaProvider` component.
import { Login } from "./pages/Login";
import { Home } from "./pages/Home";
import { UserProvider } from "./lib/context/user";
import { IdeaProvider } from "./lib/context/ideas";
import { IdeasProvider } from "./lib/context/ideas";
function App() {
const isLoginPage = window.location.pathname === "/login";
@@ -94,10 +94,10 @@ function App() {
return (
<div>
<UserProvider>
<IdeaProvider>
<IdeasProvider>
<Navbar /> {/* Add the navbar before page content */}
<main>{isLoginPage ? <Login /> : <Home />}</main>
</IdeaProvider>
</IdeasProvider>
<UserProvider>
</div>
);

View File

@@ -8,13 +8,13 @@ isPartner: true
isNew: true
cover: /images/integrations/logging-raygun/cover.png
category: logging
product:
product:
avatar: '/images/integrations/avatars/raygun.png'
vendor: Raygun
description: 'Raygun is an application performance monitoring (APM) and error tracking tool designed for software developers to identify and resolve issues in their applications quickly. It provides real-time insights into software performance, error diagnostics, and user experience, helping teams to detect, diagnose, and fix errors faster. With features like crash reporting, real user monitoring, and deployment tracking, Raygun helps ensure the stability and efficiency of applications.'
platform:
platform:
- 'Self-hosted'
images:
images:
- /images/integrations/logging-raygun/cover.png
- /images/integrations/logging-raygun/api-key.png
- /images/integrations/logging-raygun/crash-reporting.png
@@ -57,7 +57,7 @@ docker compose exec appwrite vars
## Step 3: Test the provider
Once the Sentry provider is configured, run the following command in your terminal:
Once the Raygun provider is configured, run the following command in your terminal:
```bash
docker compose exec appwrite ssl --domain="wrongdomain.com"

View File

@@ -206,14 +206,14 @@
{
title: 'Backups',
free: '-',
pro: 'Daily,',
pro: 'Daily',
scale: 'Custom',
enterprise: 'Custom'
},
{
title: 'Backups retention',
free: '-',
pro: '7 days retention,',
pro: '7 days retention',
scale: 'Custom',
enterprise: 'Custom'
},

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 78 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 96 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 707 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 609 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 656 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 617 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 120 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 576 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 892 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 673 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 126 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 564 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 924 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 131 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 46 KiB

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 46 KiB

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

After

Width:  |  Height:  |  Size: 75 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 84 KiB

After

Width:  |  Height:  |  Size: 77 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 64 KiB

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 48 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

After

Width:  |  Height:  |  Size: 77 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 78 KiB

After

Width:  |  Height:  |  Size: 79 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 65 KiB

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 49 KiB

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 66 KiB

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 49 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 64 KiB

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 48 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 65 KiB

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 49 KiB

After

Width:  |  Height:  |  Size: 36 KiB

View File

@@ -29,6 +29,10 @@ resource "digitalocean_project" "homepage" {
digitalocean_droplet.worker[*].urn,
digitalocean_droplet.nfs.urn
])
lifecycle {
ignore_changes = all
}
}
# Tags
@@ -93,6 +97,51 @@ resource "digitalocean_droplet" "manager" {
}
}
# Public load balancer in front of the swarm manager droplets.
# TLS is NOT terminated here (tls_passthrough below); the backends handle
# certificates, and PROXY protocol preserves the original client IP for them.
resource "digitalocean_loadbalancer" "public" {
# Name encodes project, region, and environment for easy identification.
name = "${var.project_name}-${var.region}-${var.environment}"
region = var.region
size_unit = 1
project_id = digitalocean_project.homepage.id
vpc_uuid = digitalocean_vpc.subnet.id
# Traffic is routed only to the manager droplets.
droplet_ids = digitalocean_droplet.manager.*.id
# HTTP->HTTPS redirection is intentionally left to the backend
# (the web entrypoint redirects to websecure), not the load balancer.
redirect_http_to_https = false
enable_backend_keepalive = true
# Send PROXY protocol headers so backends can recover the client IP.
enable_proxy_protocol = true
# Plain HTTP passthrough on port 80.
forwarding_rule {
entry_port = 80
entry_protocol = "http"
target_port = 80
target_protocol = "http"
}
# HTTP/2 on 443 with TLS passthrough: certificates are terminated by the
# backend, so the load balancer never sees decrypted traffic.
forwarding_rule {
entry_port = 443
entry_protocol = "http2"
target_port = 443
target_protocol = "http2"
tls_passthrough = true
}
# Health probe against the backend's /ping endpoint over plain HTTP.
# A droplet is marked unhealthy/healthy after 5 consecutive 3s-interval checks.
healthcheck {
port = 80
path = "/ping"
protocol = "http"
check_interval_seconds = 3
response_timeout_seconds = 3
unhealthy_threshold = 5
healthy_threshold = 5
}
# Only these CIDR ranges may reach the load balancer.
# NOTE(review): these appear to be Cloudflare's published IPv4 ranges (they
# match the forwardedHeaders trustedIPs list in the traefik config) — confirm
# against Cloudflare's current published list, as those ranges can change.
firewall {
allow = ["cidr:103.21.244.0/22","cidr:103.22.200.0/22","cidr:103.31.4.0/22","cidr:104.16.0.0/13","cidr:104.24.0.0/14","cidr:108.162.192.0/18","cidr:131.0.72.0/22","cidr:141.101.64.0/18","cidr:162.158.0.0/15","cidr:172.64.0.0/13","cidr:173.245.48.0/20","cidr:188.114.96.0/20","cidr:190.93.240.0/20","cidr:197.234.240.0/22","cidr:198.41.128.0/17"]
}
}
resource "digitalocean_droplet" "worker" {
count = var.worker_count
image = var.base_image