Commit
Enable the use of hugepages in containers.
Fixes: #5560
Signed-off-by: Zhongtao Hu <[email protected]>
1 parent 56641bc · commit e7fae46
Showing 14 changed files with 282 additions and 18 deletions.
@@ -0,0 +1,219 @@
// Copyright (c) 2019-2022 Alibaba Cloud
// Copyright (c) 2019-2022 Ant Group
//
// SPDX-License-Identifier: Apache-2.0
//

use std::{
    collections::HashMap,
    fs::File,
    io::{BufRead, BufReader},
};

use agent::Storage;
use anyhow::{anyhow, Context, Result};
use byte_unit::Byte;
use kata_sys_util::fs::get_base_name;
use kata_types::mount::KATA_EPHEMERAL_VOLUME_TYPE;

use crate::share_fs::EPHEMERAL_PATH;

use super::Volume;

const PROC_MOUNTS_FILE: &str = "/proc/mounts";

pub(crate) struct Hugepage {
    storage: Option<Storage>,
    mount: oci::Mount,
}

/// handle hugepage
impl Hugepage {
    pub(crate) fn new(
        mount: &oci::Mount,
        options_map: Option<HashMap<Byte, u64>>,
        fs_options: Option<Vec<String>>,
    ) -> Result<Self> {
        // Create mount option string
        let option = if let Some(page_size) = get_page_size(fs_options) {
            let page_size =
                Byte::from_str(page_size).context("failed to create Byte object from String")?;
            options_map
                .context("failed to get options map")?
                .get(&page_size)
                .map(|size| format!("pagesize={},size={}", page_size.get_bytes(), size))
        } else {
            None
        }
        .context("failed to get huge page options")?;
        let base_name = get_base_name(mount.source.clone())?
            .into_string()
            .map_err(|e| anyhow!("{:?}", e))?;
        let mut mount = mount.clone();
        // Set the mount source path to a path that resides inside the VM
        mount.source = format!("{}{}{}", EPHEMERAL_PATH, "/", base_name);
        // Set the mount type to "bind"
        mount.r#type = "bind".to_string();

        // Create a storage struct so that the kata agent is able to create a
        // hugetlbfs-backed volume inside the VM
        let storage = Storage {
            driver: KATA_EPHEMERAL_VOLUME_TYPE.to_string(),
            source: "nodev".to_string(),
            fs_type: "hugetlbfs".to_string(),
            mount_point: mount.source.clone(),
            options: vec![option],
            ..Default::default()
        };
        Ok(Self {
            storage: Some(storage),
            mount,
        })
    }
}

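// Illustration (hypothetical values, not part of the patch): for a container
// mount whose host source is "/dev/hugepages-2Mi", backed by a hugetlbfs mount
// with "pagesize=2M", and an options map entry (2Mi -> 1073741824) taken from
// the spec's hugepage limits, Hugepage::new() produces a bind mount sourced at
// "<EPHEMERAL_PATH>/hugepages-2Mi" plus an ephemeral Storage whose options are
// ["pagesize=2097152,size=1073741824"].
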
impl Volume for Hugepage {
    fn get_volume_mount(&self) -> Result<Vec<oci::Mount>> {
        Ok(vec![self.mount.clone()])
    }

    fn get_storage(&self) -> Result<Vec<agent::Storage>> {
        let s = if let Some(s) = self.storage.as_ref() {
            vec![s.clone()]
        } else {
            vec![]
        };
        Ok(s)
    }

    fn cleanup(&self) -> Result<()> {
        todo!()
    }
}

pub(crate) fn is_huge_page(m: &oci::Mount) -> Result<(bool, Option<Vec<String>>)> {
    if m.source.is_empty() {
        return Err(anyhow!("empty mount source"));
    }
    let file = File::open(PROC_MOUNTS_FILE).context("failed to open file")?;
    let reader = BufReader::new(file);
    for line in reader.lines().flatten() {
        // /proc/mounts fields: device mountpoint fstype options dump pass
        let items: Vec<&str> = line.split(' ').collect();
        if m.source == items[1] && items[2] == "hugetlbfs" {
            let fs_options: Vec<&str> = items[3].split(',').collect();
            return Ok((
                true,
                Some(
                    fs_options
                        .iter()
                        .map(|&s| s.to_string())
                        .collect::<Vec<String>>(),
                ),
            ));
        }
    }
    Ok((false, None))
}

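// Illustration (hypothetical mount, not part of the patch): given a /proc/mounts
// line such as
//   nodev /dev/hugepages-2Mi hugetlbfs rw,relatime,pagesize=2M 0 0
// a mount whose source is "/dev/hugepages-2Mi" makes is_huge_page() return
// (true, Some(["rw", "relatime", "pagesize=2M"])).
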
pub(crate) fn get_huge_page_option(spec: &oci::Spec) -> Result<Option<HashMap<Byte, u64>>> {
    if let Some(l) = &spec.linux {
        if let Some(r) = &l.resources {
            let hugepage_limits = r.hugepage_limits.clone();
            let mut options_map: HashMap<Byte, u64> = HashMap::new();
            for hugepage_limit in hugepage_limits {
                // The page size in the OCI spec uses MB/GB suffixes; convert them
                // to Mi/Gi so they parse as binary units.
                let page_size = hugepage_limit.page_size.replace("B", "i");
                let page_size = Byte::from_str(page_size)
                    .context("failed to create Byte object from String")?;
                options_map.insert(page_size, hugepage_limit.limit);
            }
            return Ok(Some(options_map));
        }
        return Ok(None);
    }
    Ok(None)
}

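// Illustration (hypothetical values, not part of the patch): a spec containing a
// hugepage limit { page_size: "2MB", limit: 1073741824 } yields an options map
// with the key Byte::from_str("2Mi") mapped to 1073741824.
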
fn get_page_size(fs_options: Option<Vec<String>>) -> Option<String> {
    if let Some(fs_options) = fs_options {
        for fs_option in fs_options {
            if fs_option.starts_with("pagesize=") {
                return fs_option
                    .strip_prefix("pagesize=")
                    .map(|s| format!("{}i", s));
            }
        }
    }
    None
}

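// Illustration (not part of the patch): get_page_size(Some(vec!["rw".to_string(),
// "pagesize=2M".to_string()])) returns Some("2Mi".to_string()), matching the
// "2MB" -> "2Mi" keys produced by get_huge_page_option() above.
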
#[cfg(test)]
mod tests {

    use std::{collections::HashMap, fs};

    use crate::volume::hugepage::get_page_size;

    use super::{get_huge_page_option, is_huge_page};
    use byte_unit::Byte;
    use nix::mount::{mount, umount, MsFlags};
    use oci::{Linux, LinuxHugepageLimit, LinuxResources};

    #[test]
    fn test_get_huge_page_option() {
        let format_sizes = ["1GB", "2MB"];
        let mut huge_page_limits: Vec<LinuxHugepageLimit> = vec![];
        for format_size in format_sizes {
            huge_page_limits.push(LinuxHugepageLimit {
                page_size: format_size.to_string(),
                limit: 100000,
            });
        }

        let spec = oci::Spec {
            linux: Some(Linux {
                resources: Some(LinuxResources {
                    hugepage_limits: huge_page_limits,
                    ..Default::default()
                }),
                ..Default::default()
            }),
            ..Default::default()
        };

        assert!(get_huge_page_option(&spec).unwrap().is_some());

        let mut expect_res = HashMap::new();
        expect_res.insert(Byte::from_str("1Gi").ok().unwrap(), 100000);
        expect_res.insert(Byte::from_str("2Mi").ok().unwrap(), 100000);
        assert_eq!(get_huge_page_option(&spec).unwrap().unwrap(), expect_res);
    }

    #[test]
    fn test_get_huge_page_size() {
        let format_sizes = ["1Gi", "2Mi"];
        for format_size in format_sizes {
            let dir = tempfile::tempdir().unwrap();
            let dst = dir.path().join(format!("hugepages-{}", format_size));
            fs::create_dir_all(&dst).unwrap();
            mount(
                Some("nodev"),
                &dst,
                Some("hugetlbfs"),
                MsFlags::MS_NODEV,
                Some(format!("pagesize={}", format_size).as_str()),
            )
            .unwrap();
            let mount = oci::Mount {
                source: dst.to_str().unwrap().to_string(),
                ..Default::default()
            };
            let (res, option) = is_huge_page(&mount).unwrap();
            assert!(res);
            let page_size = Byte::from_str(get_page_size(option).unwrap()).unwrap();
            assert_eq!(page_size, Byte::from_str(format_size).unwrap());
            umount(&dst).unwrap();
            fs::remove_dir(&dst).unwrap();
        }
    }
}
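
The tests above exercise both conversion paths end to end; note that test_get_huge_page_size calls mount(2), so it only runs with sufficient privileges (root) and hugetlbfs support. As a minimal standalone sketch of the page-size normalization the patch relies on, assuming only the byte_unit crate with the same Byte::from_str / get_bytes API used in the file above:

use byte_unit::Byte;

fn main() {
    // OCI hugepage limits report sizes such as "2MB", while /proc/mounts reports
    // "pagesize=2M". Both are rewritten to binary units ("2Mi") before parsing,
    // mirroring get_huge_page_option() and get_page_size() above.
    let from_spec = "2MB".replace("B", "i"); // -> "2Mi"
    let from_mounts = format!("{}i", "2M"); // -> "2Mi"

    let spec_key = Byte::from_str(from_spec).unwrap();
    let mount_key = Byte::from_str(from_mounts).unwrap();

    // Both normalize to the same Byte value, so the page size parsed from the
    // mount can be used directly as a key into the spec-derived options map.
    assert_eq!(spec_key, mount_key);
    println!("page size in bytes: {}", spec_key.get_bytes());
}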