Commit e64083a

ipfs: Replace dag/stat with files/stat (#3597)
* ipfs: Switch from `dag/stat` to `files/stat`
* ipfs: Double check file size restriction
* docker, ci: Downgrade IPFS to 0.10.0
1 parent 4c0023c commit e64083a

File tree

7 files changed (+68 -60 lines)

Diff for: .github/workflows/ci.yml

+2 -2
@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       ipfs:
-        image: ipfs/go-ipfs:v0.12.2
+        image: ipfs/go-ipfs:v0.10.0
         ports:
           - 5001:5001
       postgres:
@@ -75,7 +75,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       ipfs:
-        image: ipfs/go-ipfs:v0.12.2
+        image: ipfs/go-ipfs:v0.10.0
         ports:
           - 5001:5001
       postgres:

Diff for: .travis.yml

+1-1
@@ -23,7 +23,7 @@ before_install:
   # Install Node.js 11.x
   - nvm install 11 && nvm use 11
   # Install IPFS
-  - wget "https://dist.ipfs.io/go-ipfs/v0.12.2/go-ipfs_v0.12.2_linux-amd64.tar.gz" -O /tmp/ipfs.tar.gz
+  - wget "https://dist.ipfs.io/go-ipfs/v0.10.0/go-ipfs_v0.10.0_linux-amd64.tar.gz" -O /tmp/ipfs.tar.gz
   - pushd . && cd $HOME/bin && tar -xzvf /tmp/ipfs.tar.gz && popd
   - export PATH="$HOME/bin/go-ipfs:$PATH"
   - ipfs init

Diff for: core/src/link_resolver.rs

+37 -41
@@ -12,7 +12,7 @@ use lru_time_cache::LruCache;
 use serde_json::Value;
 
 use graph::{
-    ipfs_client::{IpfsClient, StatApi, StatResponse},
+    ipfs_client::{IpfsClient, StatApi},
     prelude::{LinkResolver as LinkResolverTrait, *},
 };
 
@@ -55,7 +55,7 @@ async fn select_fastest_client_with_stat(
     path: String,
     timeout: Duration,
     do_retry: bool,
-) -> Result<(StatResponse, Arc<IpfsClient>), Error> {
+) -> Result<(u64, Arc<IpfsClient>), Error> {
     let mut err: Option<Error> = None;
 
     let mut stats: FuturesUnordered<_> = clients
@@ -67,7 +67,11 @@ async fn select_fastest_client_with_stat(
             retry_policy(do_retry, "IPFS stat", &logger).run(move || {
                 let path = path.clone();
                 let c = c.cheap_clone();
-                async move { c.stat(api, path, timeout).map_ok(move |s| (s, i)).await }
+                async move {
+                    c.stat_size(api, path, timeout)
+                        .map_ok(move |s| (s, i))
+                        .await
+                }
             })
         })
         .collect();
@@ -90,18 +94,14 @@ async fn select_fastest_client_with_stat(
 }
 
 // Returns an error if the stat is bigger than `max_file_bytes`
-fn restrict_file_size(
-    path: &str,
-    stat: &StatResponse,
-    max_file_bytes: &Option<u64>,
-) -> Result<(), Error> {
+fn restrict_file_size(path: &str, size: u64, max_file_bytes: &Option<u64>) -> Result<(), Error> {
     if let Some(max_file_bytes) = max_file_bytes {
-        if stat.size > *max_file_bytes {
+        if size > *max_file_bytes {
             return Err(anyhow!(
                 "IPFS file {} is too large. It can be at most {} bytes but is {} bytes",
                 path,
                 max_file_bytes,
-                stat.size
+                size
             ));
         }
     }
@@ -172,10 +172,10 @@ impl LinkResolverTrait for LinkResolver {
         }
         trace!(logger, "IPFS cache miss"; "hash" => &path);
 
-        let (stat, client) = select_fastest_client_with_stat(
+        let (size, client) = select_fastest_client_with_stat(
            self.clients.cheap_clone(),
            logger.cheap_clone(),
-            StatApi::Dag,
+            StatApi::Files,
            path.clone(),
            self.timeout,
            self.retry,
@@ -184,44 +184,40 @@ impl LinkResolverTrait for LinkResolver {
 
         let max_cache_file_size = self.env_vars.mappings.max_ipfs_cache_file_size;
         let max_file_size = self.env_vars.mappings.max_ipfs_file_bytes.map(|n| n as u64);
-        restrict_file_size(&path, &stat, &max_file_size)?;
+        restrict_file_size(&path, size, &max_file_size)?;
 
-        let path = path.clone();
-        let this = self.clone();
+        let req_path = path.clone();
         let timeout = self.timeout;
-        let logger = logger.clone();
         let data = retry_policy(self.retry, "ipfs.cat", &logger)
             .run(move || {
-                let path = path.clone();
+                let path = req_path.clone();
                 let client = client.clone();
-                let this = this.clone();
-                let logger = logger.clone();
-                async move {
-                    let data = client.cat_all(path.clone(), timeout).await?.to_vec();
-
-                    // Only cache files if they are not too large
-                    if data.len() <= max_cache_file_size {
-                        let mut cache = this.cache.lock().unwrap();
-                        if !cache.contains_key(&path) {
-                            cache.insert(path.to_owned(), data.clone());
-                        }
-                    } else {
-                        debug!(logger, "File too large for cache";
-                            "path" => path,
-                            "size" => data.len()
-                        );
-                    }
-                    Result::<Vec<u8>, reqwest::Error>::Ok(data)
-                }
+                async move { Ok(client.cat_all(path.clone(), timeout).await?.to_vec()) }
             })
             .await?;
 
+        // The size reported by `files/stat` is not guaranteed to be exact, so check the limit again.
+        restrict_file_size(&path, data.len() as u64, &max_file_size)?;
+
+        // Only cache files if they are not too large
+        if data.len() <= max_cache_file_size {
+            let mut cache = self.cache.lock().unwrap();
+            if !cache.contains_key(&path) {
+                cache.insert(path.to_owned(), data.clone());
+            }
+        } else {
+            debug!(logger, "File too large for cache";
+                "path" => path,
+                "size" => data.len()
+            );
+        }
+
         Ok(data)
     }
 
     async fn get_block(&self, logger: &Logger, link: &Link) -> Result<Vec<u8>, Error> {
         trace!(logger, "IPFS block get"; "hash" => &link.link);
-        let (stat, client) = select_fastest_client_with_stat(
+        let (size, client) = select_fastest_client_with_stat(
             self.clients.cheap_clone(),
             logger.cheap_clone(),
             StatApi::Block,
@@ -232,7 +228,7 @@ impl LinkResolverTrait for LinkResolver {
         .await?;
 
         let max_file_size = self.env_vars.mappings.max_ipfs_file_bytes.map(|n| n as u64);
-        restrict_file_size(&link.link, &stat, &max_file_size)?;
+        restrict_file_size(&link.link, size, &max_file_size)?;
 
         let link = link.link.clone();
         let data = retry_policy(self.retry, "ipfs.getBlock", &logger)
@@ -253,18 +249,18 @@ impl LinkResolverTrait for LinkResolver {
         // Discard the `/ipfs/` prefix (if present) to get the hash.
         let path = link.link.trim_start_matches("/ipfs/");
 
-        let (stat, client) = select_fastest_client_with_stat(
+        let (size, client) = select_fastest_client_with_stat(
             self.clients.cheap_clone(),
             logger.cheap_clone(),
-            StatApi::Dag,
+            StatApi::Files,
             path.to_string(),
             self.timeout,
             self.retry,
         )
         .await?;
 
         let max_file_size = Some(self.env_vars.mappings.max_ipfs_map_file_size as u64);
-        restrict_file_size(path, &stat, &max_file_size)?;
+        restrict_file_size(path, size, &max_file_size)?;
 
         let mut stream = client.cat(path.to_string()).await?.fuse().boxed().compat();

Diff for: docker/docker-compose.yml

+1 -1
@@ -22,7 +22,7 @@ services:
       ethereum: 'mainnet:http://host.docker.internal:8545'
       GRAPH_LOG: info
   ipfs:
-    image: ipfs/go-ipfs:v0.12.2
+    image: ipfs/go-ipfs:v0.10.0
     ports:
       - '5001:5001'
     volumes:

Diff for: graph/src/ipfs_client.rs

+25 -13
@@ -9,25 +9,31 @@ use serde::Deserialize;
 use std::time::Duration;
 use std::{str::FromStr, sync::Arc};
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, PartialEq, Eq)]
 pub enum StatApi {
     Block,
-    Dag,
+    Files,
 }
 
 impl StatApi {
     fn route(&self) -> &'static str {
         match self {
             Self::Block => "block",
-            Self::Dag => "dag",
+            Self::Files => "files",
         }
     }
 }
 
 #[derive(Debug, Deserialize)]
 #[serde(rename_all = "PascalCase")]
-pub struct StatResponse {
-    pub size: u64,
+struct BlockStatResponse {
+    size: u64,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "PascalCase")]
+struct FilesStatResponse {
+    cumulative_size: u64,
 }
 
 #[derive(Debug, Deserialize)]
@@ -68,18 +74,24 @@ impl IpfsClient {
         }
     }
 
-    /// Calls stat for the given API route.
-    pub async fn stat(
+    /// Calls stat for the given API route, and returns the total size of the object.
+    pub async fn stat_size(
         &self,
         api: StatApi,
-        path: String,
+        mut cid: String,
         timeout: Duration,
-    ) -> Result<StatResponse, reqwest::Error> {
+    ) -> Result<u64, reqwest::Error> {
         let route = format!("{}/stat", api.route());
-        self.call(self.url(&route, path), None, Some(timeout))
-            .await?
-            .json()
-            .await
+        if api == StatApi::Files {
+            // files/stat requires a leading `/ipfs/`.
+            cid = format!("/ipfs/{}", cid);
+        }
+        let url = self.url(&route, cid);
+        let res = self.call(url, None, Some(timeout)).await?;
+        match api {
+            StatApi::Files => Ok(res.json::<FilesStatResponse>().await?.cumulative_size),
+            StatApi::Block => Ok(res.json::<BlockStatResponse>().await?.size),
+        }
     }
 
     /// Download the entire contents.
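Per the struct definitions above, `block/stat` is read through `BlockStatResponse` (its `Size` field) and `files/stat` through `FilesStatResponse` (its `CumulativeSize` field), with `#[serde(rename_all = "PascalCase")]` mapping the JSON keys onto the snake_case fields. A small deserialization sketch; the sample JSON bodies are assumptions about what go-ipfs returns, and any extra fields are simply ignored by serde:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct BlockStatResponse {
    size: u64,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct FilesStatResponse {
    cumulative_size: u64,
}

fn main() -> Result<(), serde_json::Error> {
    // Assumed sample bodies; the real responses may carry additional fields, which serde skips.
    let block_json = r#"{ "Key": "QmExample", "Size": 262158 }"#;
    let files_json = r#"{ "Hash": "QmExample", "Size": 262144, "CumulativeSize": 262158, "Type": "file" }"#;

    let block: BlockStatResponse = serde_json::from_str(block_json)?;
    let files: FilesStatResponse = serde_json::from_str(files_json)?;

    // `CumulativeSize` covers the whole DAG, so it is at least as large as the raw file size,
    // which is why the resolver re-checks the limit against the downloaded bytes.
    println!("block/stat Size = {}", block.size);
    println!("files/stat CumulativeSize = {}", files.cumulative_size);
    Ok(())
}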

Diff for: store/test-store/devel/docker-compose.yml

+1 -1
@@ -1,7 +1,7 @@
 version: '3'
 services:
   ipfs:
-    image: ipfs/go-ipfs:v0.12.2
+    image: ipfs/go-ipfs:v0.10.0
     ports:
       - '5001:5001'
     volumes:

Diff for: tests/tests/common/docker.rs

+1 -1
@@ -7,7 +7,7 @@ use tokio::time::{sleep, Duration};
 use tokio_stream::StreamExt;
 
 const POSTGRES_IMAGE: &'static str = "postgres:latest";
-const IPFS_IMAGE: &'static str = "ipfs/go-ipfs:v0.12.2";
+const IPFS_IMAGE: &'static str = "ipfs/go-ipfs:v0.10.0";
 const GANACHE_IMAGE: &'static str = "trufflesuite/ganache-cli:latest";
 type DockerError = bollard::errors::Error;
 