From 7be889ae19caca7087445580ef4621ea8dcfe768 Mon Sep 17 00:00:00 2001
From: Pratyush Mishra <pratyushmishra@berkeley.edu>
Date: Wed, 15 Feb 2023 08:31:45 -0800
Subject: [PATCH] Upgrade dependencies to 0.4 (#112)

* intermediate commit

* build passing

* tests passing

* cargo fmt

* Address comments

* Improve checks

* Format

* Fix CI for no-std

* Update CHANGELOG

* Fix no-std

Co-authored-by: Marcin Gorny <marcin.gorny.94@protonmail.com>

* Revert CI change

* Fix CI for merge groups

---------

Co-authored-by: nikkolasg <nikkolasg@gmail.com>
Co-authored-by: Nicolas Gailly <nikkolasg@users.noreply.github.com>
Co-authored-by: Marcin Gorny <marcin.gorny.94@protonmail.com>
---
 .github/workflows/ci.yml                      |   1 +
 CHANGELOG.md                                  |  13 +-
 Cargo.toml                                    |  63 +--
 src/challenge.rs                              |   2 +-
 src/constraints.rs                            |   2 +-
 src/data_structures.rs                        |  25 +-
 src/ipa_pc/data_structures.rs                 |  35 +-
 src/ipa_pc/mod.rs                             |  68 ++-
 src/kzg10/data_structures.rs                  | 395 ++++++----------
 src/kzg10/mod.rs                              | 137 +++---
 src/lib.rs                                    |  11 +-
 src/marlin/marlin_pc/data_structures.rs       |  50 +-
 src/marlin/marlin_pc/mod.rs                   |  86 ++--
 src/marlin/marlin_pst13_pc/data_structures.rs | 440 +++++++-----------
 src/marlin/marlin_pst13_pc/mod.rs             | 170 +++----
 src/marlin/mod.rs                             |  87 ++--
 src/multilinear_pc/data_structures.rs         |  18 +-
 src/multilinear_pc/mod.rs                     | 101 ++--
 src/sonic_pc/data_structures.rs               | 194 +++-----
 src/sonic_pc/mod.rs                           | 151 +++---
 src/streaming_kzg/mod.rs                      |  73 ++-
 src/streaming_kzg/space.rs                    |  64 +--
 src/streaming_kzg/time.rs                     |  60 ++-
 23 files changed, 980 insertions(+), 1266 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0f4be64..f1d55e8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,5 +1,6 @@
 name: CI
 on:
+  merge_group:
   pull_request:
   push:
     branches:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9203dcd..1a7ac4d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,12 +4,13 @@
 
 ### Breaking changes
 
-- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Function parameter `opening_challenge: F` for `open`,
-  `check`,  has been changed from `F` to `opening_challenges: &mut ChallengeGenerator`.
+- [\#112](https://github.com/arkworks-rs/poly-commit/pull/112) Upgrade all dependencies to `0.4`.
+- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Argument `opening_challenge: F` for `open`,
+  `check`, has been changed from `F` to `opening_challenges: &mut ChallengeGenerator`.
 
 ### Features
 
-- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Add multivariate opening challenge strategy. Integrate with sponge API. 
+- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Add multivariate opening challenge strategy. Integrate with sponge API.
 
 ### Improvements
 
@@ -19,7 +20,7 @@
 
 ### Breaking changes
 
-- [\#78](https://github.com/arkworks-rs/poly-commit/pull/78) Fix MarlinPC's CommitterKey to return the correct `supported_degree`.
+- [\#78](https://github.com/arkworks-rs/poly-commit/pull/78) Fix `MarlinPC`'s `CommitterKey` to return the correct `supported_degree`.
 
 ### Features
 
@@ -27,6 +28,6 @@
 
 ### Bug fixes
 
-## v0.2.0 
+## v0.2.0
 
-- initial release of `ark-poly-commit`.
\ No newline at end of file
+- Initial release of `ark-poly-commit`.
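
For reference, the `ChallengeGenerator` change described in the CHANGELOG entry for #82 means call sites no longer receive a single `opening_challenge: F`; they draw successive challenges from a mutable generator instead. A minimal caller-side sketch follows; the helper is hypothetical and only assumes the `try_next_challenge_of_size` / `FieldElementSize` API used elsewhere in this patch.

use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
use ark_ff::{PrimeField, Zero};
use ark_poly_commit::challenge::ChallengeGenerator;

// Combine a list of evaluations with successive opening challenges.
// Under the multivariate strategy each challenge is freshly squeezed from the
// sponge; under the univariate strategy it is the next power of one challenge.
fn combine_evaluations<F: PrimeField, S: CryptographicSponge>(
    evals: &[F],
    opening_challenges: &mut ChallengeGenerator<F, S>,
) -> F {
    let mut acc = F::zero();
    for eval in evals {
        let challenge = opening_challenges.try_next_challenge_of_size(FieldElementSize::Full);
        acc += challenge * eval;
    }
    acc
}
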
diff --git a/Cargo.toml b/Cargo.toml
index 98daed2..aec7724 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,16 +1,6 @@
 [package]
 name = "ark-poly-commit"
-version = "0.3.0"
-authors = [
-  "Alessandro Chiesa <alexch@berkeley.edu>",
-  "Mary Maller <mary.maller.15@ucl.ac.uk>",
-  "Yuncong Hu <huyuncongh@gmail.com>",
-  "William Lin",
-  "Pratyush Mishra <pratyush@berkeley.edu>",
-  "Noah Vesely <noah.vesely.18@ucl.ac.uk>",
-  "Nicholas Ward <npward@berkeley.edu>",
-  "arkworks contributors"
-]
+version = "0.4.0"
 description = "A library for constructing polynomial commitment schemes for use in zkSNARKs"
 repository = "https://github.com/arkworks-rs/poly-commit"
 documentation = "https://docs.rs/ark-poly-commit/"
@@ -21,26 +11,26 @@ license = "MIT/Apache-2.0"
 edition = "2018"
 
 [dependencies]
-ark-serialize = { version = "^0.3.0", default-features = false, features = [ "derive" ] }
-ark-ff = { version = "^0.3.0", default-features = false }
-ark-ec = { version = "^0.3.0", default-features = false }
-ark-poly = {version = "^0.3.0", default-features = false }
-ark-sponge = {version = "^0.3.0", default-features = false}
-
-ark-std = { version = "^0.3.0", default-features = false }
-ark-relations = { version = "^0.3.0", default-features = false, optional = true }
-ark-r1cs-std = { version = "^0.3.0", default-features = false, optional = true }
-hashbrown = { version = "0.9", optional = true }
-
-digest = "0.9"
-rayon = { version = "1", optional = true }
+ark-serialize = { version = "^0.4.0", default-features = false, features = [ "derive" ] }
+ark-ff = { version = "^0.4.0", default-features = false }
+ark-ec = { version = "^0.4.0", default-features = false }
+ark-poly = {version = "^0.4.0", default-features = false }
+ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge"] }
+ark-std = { version = "^0.4.0", default-features = false }
+
+ark-relations = { version = "^0.4.0", default-features = false, optional = true }
+ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true }
+hashbrown = { version = "0.13", default-features = false, optional = true }
+
+digest = "0.10"
 derivative = { version = "2", features = [ "use_core" ] }
+rayon = { version = "1", optional = true }
 
 [dev-dependencies]
-ark-ed-on-bls12-381 = { version = "^0.3.0", default-features = false }
-ark-bls12-381 = { version = "^0.3.0", default-features = false, features = [ "curve" ] }
-ark-bls12-377 = { version = "^0.3.0", default-features = false, features = [ "curve" ] }
-blake2 = { version = "0.9", default-features = false }
+ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false }
+ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] }
+ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] }
+blake2 = { version = "0.10", default-features = false }
 rand_chacha = { version = "0.3.0", default-features = false }
 
 [profile.release]
@@ -55,22 +45,9 @@ debug-assertions = true
 incremental = true
 debug = true
 
-# To be removed in the new release.
-[patch.crates-io]
-ark-std = { git = "https://github.com/arkworks-rs/std" }
-ark-ec = { git = "https://github.com/arkworks-rs/algebra" }
-ark-ff = { git = "https://github.com/arkworks-rs/algebra" }
-ark-poly = { git = "https://github.com/arkworks-rs/algebra" }
-ark-serialize = { git = "https://github.com/arkworks-rs/algebra" }
-ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves" }
-ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves" }
-ark-ed-on-bls12-381 = { git = "https://github.com/arkworks-rs/curves" }
-ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std" }
-ark-sponge = { git = "https://github.com/arkworks-rs/sponge" }
-
 [features]
 default = [ "std", "parallel" ]
-std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-sponge/std"]
-r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-sponge/r1cs"]
+std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"]
+r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"]
 print-trace = [ "ark-std/print-trace" ]
 parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon" ]
diff --git a/src/challenge.rs b/src/challenge.rs
index aa78d90..23b3c9d 100644
--- a/src/challenge.rs
+++ b/src/challenge.rs
@@ -1,5 +1,5 @@
+use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
 use ark_ff::PrimeField;
-use ark_sponge::{CryptographicSponge, FieldElementSize};
 
 /// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy.
 /// For multivariate strategy, each challenge is freshly squeezed from a sponge.
diff --git a/src/constraints.rs b/src/constraints.rs
index 729b833..8abb14e 100644
--- a/src/constraints.rs
+++ b/src/constraints.rs
@@ -2,12 +2,12 @@ use crate::{
     data_structures::LabeledCommitment, BatchLCProof, LCTerm, LinearCombination,
     PolynomialCommitment, String, Vec,
 };
+use ark_crypto_primitives::sponge::CryptographicSponge;
 use ark_ff::PrimeField;
 use ark_poly::Polynomial;
 use ark_r1cs_std::fields::nonnative::NonNativeFieldVar;
 use ark_r1cs_std::{fields::fp::FpVar, prelude::*};
 use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError};
-use ark_sponge::CryptographicSponge;
 use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized};
 use hashbrown::{HashMap, HashSet};
 
diff --git a/src/data_structures.rs b/src/data_structures.rs
index 2259451..4802ec7 100644
--- a/src/data_structures.rs
+++ b/src/data_structures.rs
@@ -1,10 +1,9 @@
-use crate::{Polynomial, Rc, String, Vec};
+use crate::{Polynomial, String, Vec};
 use ark_ff::{Field, PrimeField, ToConstraintField};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::rand::RngCore;
 use ark_std::{
     borrow::Borrow,
-    io::{Read, Write},
     marker::PhantomData,
     ops::{AddAssign, MulAssign, SubAssign},
 };
@@ -62,12 +61,6 @@ pub trait PCCommitment: Clone + CanonicalSerialize + CanonicalDeserialize {
 
     /// Does this commitment have a degree bound?
     fn has_degree_bound(&self) -> bool;
-
-    /// Size in bytes
-    #[deprecated(since = "0.4.0", note = "Please use `.serialized_size()` instead.")]
-    fn size_in_bytes(&self) -> usize {
-        self.serialized_size()
-    }
 }
 
 /// Defines the minimal interface of prepared commitments for any polynomial
@@ -96,16 +89,6 @@ pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize {
     ) -> Self;
 }
 
-/// Defines the minimal interface of evaluation proofs for any polynomial
-/// commitment scheme.
-pub trait PCProof: Clone + CanonicalSerialize + CanonicalDeserialize {
-    /// Size in bytes
-    #[deprecated(since = "0.4.0", note = "Please use `.serialized_size()` instead.")]
-    fn size_in_bytes(&self) -> usize {
-        self.serialized_size()
-    }
-}
-
 /// A proof of satisfaction of linear combinations.
 #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)]
 pub struct BatchLCProof<F: PrimeField, T: Clone + CanonicalSerialize + CanonicalDeserialize> {
@@ -121,7 +104,7 @@ pub struct BatchLCProof<F: PrimeField, T: Clone + CanonicalSerialize + Canonical
 #[derive(Debug, Clone, CanonicalSerialize, CanonicalDeserialize)]
 pub struct LabeledPolynomial<F: Field, P: Polynomial<F>> {
     label: PolynomialLabel,
-    polynomial: Rc<P>,
+    polynomial: P,
     degree_bound: Option<usize>,
     hiding_bound: Option<usize>,
     _field: PhantomData<F>,
@@ -145,7 +128,7 @@ impl<'a, F: Field, P: Polynomial<F>> LabeledPolynomial<F, P> {
     ) -> Self {
         Self {
             label,
-            polynomial: Rc::new(polynomial),
+            polynomial: polynomial,
             degree_bound,
             hiding_bound,
             _field: PhantomData,
diff --git a/src/ipa_pc/data_structures.rs b/src/ipa_pc/data_structures.rs
index 8369bec..7ba56c9 100644
--- a/src/ipa_pc/data_structures.rs
+++ b/src/ipa_pc/data_structures.rs
@@ -1,18 +1,15 @@
 use crate::*;
 use crate::{PCCommitterKey, PCVerifierKey, Vec};
-use ark_ec::AffineCurve;
+use ark_ec::AffineRepr;
 use ark_ff::{Field, UniformRand, Zero};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::rand::RngCore;
-use ark_std::{
-    io::{Read, Write},
-    vec,
-};
+use ark_std::vec;
 
 /// `UniversalParams` are the universal parameters for the inner product arg scheme.
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
-pub struct UniversalParams<G: AffineCurve> {
+pub struct UniversalParams<G: AffineRepr> {
     /// The key used to commit to polynomials.
     pub comm_key: Vec<G>,
 
@@ -23,7 +20,7 @@ pub struct UniversalParams<G: AffineCurve> {
     pub s: G,
 }
 
-impl<G: AffineCurve> PCUniversalParams for UniversalParams<G> {
+impl<G: AffineRepr> PCUniversalParams for UniversalParams<G> {
     fn max_degree(&self) -> usize {
         self.comm_key.len() - 1
     }
@@ -38,7 +35,7 @@ impl<G: AffineCurve> PCUniversalParams for UniversalParams<G> {
     Clone(bound = ""),
     Debug(bound = "")
 )]
-pub struct CommitterKey<G: AffineCurve> {
+pub struct CommitterKey<G: AffineRepr> {
     /// The key used to commit to polynomials.
     pub comm_key: Vec<G>,
 
@@ -54,7 +51,7 @@ pub struct CommitterKey<G: AffineCurve> {
     pub max_degree: usize,
 }
 
-impl<G: AffineCurve> PCCommitterKey for CommitterKey<G> {
+impl<G: AffineRepr> PCCommitterKey for CommitterKey<G> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -66,7 +63,7 @@ impl<G: AffineCurve> PCCommitterKey for CommitterKey<G> {
 /// `VerifierKey` is used to check evaluation proofs for a given commitment.
 pub type VerifierKey<G> = CommitterKey<G>;
 
-impl<G: AffineCurve> PCVerifierKey for VerifierKey<G> {
+impl<G: AffineRepr> PCVerifierKey for VerifierKey<G> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -79,7 +76,7 @@ impl<G: AffineCurve> PCVerifierKey for VerifierKey<G> {
 /// Nothing to do to prepare this verifier key (for now).
 pub type PreparedVerifierKey<G> = VerifierKey<G>;
 
-impl<G: AffineCurve> PCPreparedVerifierKey<VerifierKey<G>> for PreparedVerifierKey<G> {
+impl<G: AffineRepr> PCPreparedVerifierKey<VerifierKey<G>> for PreparedVerifierKey<G> {
     /// prepare `PreparedVerifierKey` from `VerifierKey`
     fn prepare(vk: &VerifierKey<G>) -> Self {
         vk.clone()
@@ -97,7 +94,7 @@ impl<G: AffineCurve> PCPreparedVerifierKey<VerifierKey<G>> for PreparedVerifierK
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct Commitment<G: AffineCurve> {
+pub struct Commitment<G: AffineRepr> {
     /// A Pedersen commitment to the polynomial.
     pub comm: G,
 
@@ -107,7 +104,7 @@ pub struct Commitment<G: AffineCurve> {
     pub shifted_comm: Option<G>,
 }
 
-impl<G: AffineCurve> PCCommitment for Commitment<G> {
+impl<G: AffineRepr> PCCommitment for Commitment<G> {
     #[inline]
     fn empty() -> Self {
         Commitment {
@@ -124,7 +121,7 @@ impl<G: AffineCurve> PCCommitment for Commitment<G> {
 /// Nothing to do to prepare this commitment (for now).
 pub type PreparedCommitment<E> = Commitment<E>;
 
-impl<G: AffineCurve> PCPreparedCommitment<Commitment<G>> for PreparedCommitment<G> {
+impl<G: AffineRepr> PCPreparedCommitment<Commitment<G>> for PreparedCommitment<G> {
     /// prepare `PreparedCommitment` from `Commitment`
     fn prepare(vk: &Commitment<G>) -> Self {
         vk.clone()
@@ -141,7 +138,7 @@ impl<G: AffineCurve> PCPreparedCommitment<Commitment<G>> for PreparedCommitment<
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct Randomness<G: AffineCurve> {
+pub struct Randomness<G: AffineRepr> {
     /// Randomness is some scalar field element.
     pub rand: G::ScalarField,
 
@@ -149,7 +146,7 @@ pub struct Randomness<G: AffineCurve> {
     pub shifted_rand: Option<G::ScalarField>,
 }
 
-impl<G: AffineCurve> PCRandomness for Randomness<G> {
+impl<G: AffineRepr> PCRandomness for Randomness<G> {
     fn empty() -> Self {
         Self {
             rand: G::ScalarField::zero(),
@@ -177,7 +174,7 @@ impl<G: AffineCurve> PCRandomness for Randomness<G> {
     Clone(bound = ""),
     Debug(bound = "")
 )]
-pub struct Proof<G: AffineCurve> {
+pub struct Proof<G: AffineRepr> {
     /// Vector of left elements for each of the log_d iterations in `open`
     pub l_vec: Vec<G>,
 
@@ -199,8 +196,6 @@ pub struct Proof<G: AffineCurve> {
     pub rand: Option<G::ScalarField>,
 }
 
-impl<G: AffineCurve> PCProof for Proof<G> {}
-
 /// `SuccinctCheckPolynomial` is a succinctly-representated polynomial
 /// generated from the `log_d` random oracle challenges generated in `open`.
 /// It has the special property that can be evaluated in `O(log_d)` time.
diff --git a/src/ipa_pc/mod.rs b/src/ipa_pc/mod.rs
index 4751f8c..53326e5 100644
--- a/src/ipa_pc/mod.rs
+++ b/src/ipa_pc/mod.rs
@@ -3,7 +3,7 @@ use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet};
 use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination};
 use crate::{PCCommitterKey, PCRandomness, PCUniversalParams, PolynomialCommitment};
 
-use ark_ec::{msm::VariableBaseMSM, AffineCurve, ProjectiveCurve};
+use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
 use ark_ff::{Field, One, PrimeField, UniformRand, Zero};
 use ark_serialize::CanonicalSerialize;
 use ark_std::rand::RngCore;
@@ -16,7 +16,7 @@ pub use data_structures::*;
 use rayon::prelude::*;
 
 use crate::challenge::ChallengeGenerator;
-use ark_sponge::CryptographicSponge;
+use ark_crypto_primitives::sponge::CryptographicSponge;
 use digest::Digest;
 
 /// A polynomial commitment scheme based on the hardness of the
@@ -33,7 +33,7 @@ use digest::Digest;
 /// [pcdas]: https://eprint.iacr.org/2020/499
 /// [marlin]: https://eprint.iacr.org/2019/1047
 pub struct InnerProductArgPC<
-    G: AffineCurve,
+    G: AffineRepr,
     D: Digest,
     P: DenseUVPolynomial<G::ScalarField>,
     S: CryptographicSponge,
@@ -46,8 +46,8 @@ pub struct InnerProductArgPC<
 
 impl<G, D, P, S> InnerProductArgPC<G, D, P, S>
 where
-    G: AffineCurve,
-    G::Projective: VariableBaseMSM<MSMBase = G, Scalar = G::ScalarField>,
+    G: AffineRepr,
+    G::Group: VariableBaseMSM<MulBase = G>,
     D: Digest,
     P: DenseUVPolynomial<G::ScalarField>,
     S: CryptographicSponge,
@@ -62,12 +62,12 @@ where
         scalars: &[G::ScalarField],
         hiding_generator: Option<G>,
         randomizer: Option<G::ScalarField>,
-    ) -> G::Projective {
+    ) -> G::Group {
         let scalars_bigint = ark_std::cfg_iter!(scalars)
             .map(|s| s.into_bigint())
             .collect::<Vec<_>>();
 
-        let mut comm = <G::Projective as VariableBaseMSM>::msm_bigint(comm_key, &scalars_bigint);
+        let mut comm = <G::Group as VariableBaseMSM>::msm_bigint(comm_key, &scalars_bigint);
 
         if randomizer.is_some() {
             assert!(hiding_generator.is_some());
@@ -114,7 +114,7 @@ where
         // `log_d` is ceil(log2 (d + 1)), which is the number of steps to compute all of the challenges
         let log_d = ark_std::log2(d + 1) as usize;
 
-        let mut combined_commitment_proj = G::Projective::zero();
+        let mut combined_commitment_proj = G::Group::zero();
         let mut combined_v = G::ScalarField::zero();
 
         let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE);
@@ -196,7 +196,7 @@ where
         let v_prime = check_poly.evaluate(point) * &proof.c;
         let h_prime = h_prime.into_affine();
 
-        let check_commitment_elem: G::Projective = Self::cm_commit(
+        let check_commitment_elem: G::Group = Self::cm_commit(
             &[proof.final_comm_key.clone(), h_prime],
             &[proof.c.clone(), v_prime],
             None,
@@ -262,10 +262,10 @@ where
     }
 
     fn combine_shifted_comm(
-        combined_comm: Option<G::Projective>,
+        combined_comm: Option<G::Group>,
         new_comm: Option<G>,
         coeff: G::ScalarField,
-    ) -> Option<G::Projective> {
+    ) -> Option<G::Group> {
         if let Some(new_comm) = new_comm {
             let coeff_new_comm = new_comm.mul(coeff);
             return Some(combined_comm.map_or(coeff_new_comm, |c| c + &coeff_new_comm));
@@ -276,9 +276,9 @@ where
 
     fn construct_labeled_commitments(
         lc_info: &[(String, Option<usize>)],
-        elements: &[G::Projective],
+        elements: &[G::Group],
     ) -> Vec<LabeledCommitment<Commitment<G>>> {
-        let comms = G::Projective::batch_normalization_into_affine(elements);
+        let comms = G::Group::normalize_batch(elements);
         let mut commitments = Vec::new();
 
         let mut i = 0;
@@ -327,18 +327,18 @@ where
                     j += 1;
                 }
                 let generator = g.unwrap();
-                generator.mul_by_cofactor_to_projective()
+                generator.mul_by_cofactor_to_group()
             })
             .collect();
 
-        G::Projective::batch_normalization_into_affine(&generators)
+        G::Group::normalize_batch(&generators)
     }
 }
 
 impl<G, D, P, S> PolynomialCommitment<G::ScalarField, P, S> for InnerProductArgPC<G, D, P, S>
 where
-    G: AffineCurve,
-    G::Projective: VariableBaseMSM<MSMBase = G, Scalar = G::ScalarField>,
+    G: AffineRepr,
+    G::Group: VariableBaseMSM<MulBase = G>,
     D: Digest,
     P: DenseUVPolynomial<G::ScalarField, Point = G::ScalarField>,
     S: CryptographicSponge,
@@ -501,7 +501,7 @@ where
     {
         let mut combined_polynomial = P::zero();
         let mut combined_rand = G::ScalarField::zero();
-        let mut combined_commitment_proj = G::Projective::zero();
+        let mut combined_commitment_proj = G::Group::zero();
 
         let mut has_hiding = false;
 
@@ -595,10 +595,8 @@ where
                 Some(hiding_rand),
             );
 
-            let mut batch = G::Projective::batch_normalization_into_affine(&[
-                combined_commitment_proj,
-                hiding_commitment_proj,
-            ]);
+            let mut batch =
+                G::Group::normalize_batch(&[combined_commitment_proj, hiding_commitment_proj]);
             hiding_commitment = Some(batch.pop().unwrap());
             combined_commitment = batch.pop().unwrap();
 
@@ -664,7 +662,7 @@ where
         let mut z = z.as_mut_slice();
 
         // This will be used for transforming the key in each step
-        let mut key_proj: Vec<G::Projective> = ck.comm_key.iter().map(|x| (*x).into()).collect();
+        let mut key_proj: Vec<G::Group> = ck.comm_key.iter().map(|x| (*x).into()).collect();
         let mut key_proj = key_proj.as_mut_slice();
 
         let mut temp;
@@ -689,7 +687,7 @@ where
             let r = Self::cm_commit(key_r, coeffs_l, None, None)
                 + &h_prime.mul(Self::inner_product(coeffs_l, z_r));
 
-            let lr = G::Projective::batch_normalization_into_affine(&[l, r]);
+            let lr = G::Group::normalize_batch(&[l, r]);
             l_vec.push(lr[0]);
             r_vec.push(lr[1]);
 
@@ -719,7 +717,7 @@ where
             z = z_l;
 
             key_proj = key_proj_l;
-            temp = G::Projective::batch_normalization_into_affine(key_proj);
+            temp = G::Group::normalize_batch(key_proj);
             comm_key = &temp;
 
             n /= 2;
@@ -815,7 +813,7 @@ where
         let mut randomizer = G::ScalarField::one();
 
         let mut combined_check_poly = P::zero();
-        let mut combined_final_key = G::Projective::zero();
+        let mut combined_final_key = G::Group::zero();
 
         for ((_point_label, (point, labels)), p) in query_to_labels_map.into_iter().zip(proof) {
             let lc_time =
@@ -907,8 +905,8 @@ where
             let mut degree_bound = None;
             let mut hiding_bound = None;
 
-            let mut combined_comm = G::Projective::zero();
-            let mut combined_shifted_comm: Option<G::Projective> = None;
+            let mut combined_comm = G::Group::zero();
+            let mut combined_shifted_comm: Option<G::Group> = None;
 
             let mut combined_rand = G::ScalarField::zero();
             let mut combined_shifted_rand: Option<G::ScalarField> = None;
@@ -1011,8 +1009,8 @@ where
             let num_polys = lc.len();
 
             let mut degree_bound = None;
-            let mut combined_comm = G::Projective::zero();
-            let mut combined_shifted_comm: Option<G::Projective> = None;
+            let mut combined_comm = G::Group::zero();
+            let mut combined_shifted_comm: Option<G::Group> = None;
 
             for (coeff, label) in lc.iter() {
                 if label.is_one() {
@@ -1075,18 +1073,18 @@ mod tests {
     #![allow(non_camel_case_types)]
 
     use super::InnerProductArgPC;
-    use ark_ec::AffineCurve;
+    use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
+    use ark_ec::AffineRepr;
     use ark_ed_on_bls12_381::{EdwardsAffine, Fr};
     use ark_ff::PrimeField;
     use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial};
-    use ark_sponge::poseidon::PoseidonSponge;
-    use blake2::Blake2s;
+    use blake2::Blake2s256;
     use rand_chacha::ChaCha20Rng;
 
     type UniPoly = DensePoly<Fr>;
-    type Sponge = PoseidonSponge<<EdwardsAffine as AffineCurve>::ScalarField>;
+    type Sponge = PoseidonSponge<<EdwardsAffine as AffineRepr>::ScalarField>;
     type PC<E, D, P, S> = InnerProductArgPC<E, D, P, S>;
-    type PC_JJB2S = PC<EdwardsAffine, Blake2s, UniPoly, Sponge>;
+    type PC_JJB2S = PC<EdwardsAffine, Blake2s256, UniPoly, Sponge>;
 
     fn rand_poly<F: PrimeField>(
         degree: usize,
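
The ipa_pc changes above are one mechanical migration applied throughout: `AffineCurve` becomes `AffineRepr`, the `G::Projective` associated type becomes `G::Group`, `batch_normalization_into_affine` becomes `CurveGroup::normalize_batch`, and `VariableBaseMSM` now lives at the top level of `ark_ec`. A minimal sketch of that pattern (hypothetical helper, not part of the crate):

use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::PrimeField;

// Commit to a coefficient vector with a plain MSM over the committer key,
// using the arkworks 0.4 trait names.
fn msm_commit<G>(comm_key: &[G], coeffs: &[G::ScalarField]) -> G
where
    G: AffineRepr,
    G::Group: VariableBaseMSM<MulBase = G>,
{
    let coeffs_bigint: Vec<_> = coeffs.iter().map(|c| c.into_bigint()).collect();
    // `G::Group` plays the role of the old `G::Projective`.
    <G::Group as VariableBaseMSM>::msm_bigint(comm_key, &coeffs_bigint).into_affine()
}
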
diff --git a/src/kzg10/data_structures.rs b/src/kzg10/data_structures.rs
index 161e86f..fb5390a 100644
--- a/src/kzg10/data_structures.rs
+++ b/src/kzg10/data_structures.rs
@@ -1,7 +1,11 @@
 use crate::*;
-use ark_ec::{PairingEngine, ProjectiveCurve};
-use ark_ff::{PrimeField, ToConstraintField, Zero};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_ec::pairing::Pairing;
+use ark_ec::AffineRepr;
+use ark_ec::Group;
+use ark_ff::{PrimeField, ToConstraintField};
+use ark_serialize::{
+    CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate,
+};
 use ark_std::{
     borrow::Cow,
     io::{Read, Write},
@@ -17,7 +21,7 @@ use ark_std::{
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct UniversalParams<E: PairingEngine> {
+pub struct UniversalParams<E: Pairing> {
     /// Group elements of the form `{ \beta^i G }`, where `i` ranges from 0 to `degree`.
     pub powers_of_g: Vec<E::G1Affine>,
     /// Group elements of the form `{ \beta^i \gamma G }`, where `i` ranges from 0 to `degree`.
@@ -36,89 +40,63 @@ pub struct UniversalParams<E: PairingEngine> {
     pub prepared_beta_h: E::G2Prepared,
 }
 
-impl<E: PairingEngine> PCUniversalParams for UniversalParams<E> {
+impl<E: Pairing> Valid for UniversalParams<E> {
+    fn check(&self) -> Result<(), SerializationError> {
+        self.powers_of_g.check()?;
+        self.powers_of_gamma_g.check()?;
+        self.h.check()?;
+        self.beta_h.check()?;
+        self.neg_powers_of_h.check()?;
+        Ok(())
+    }
+}
+impl<E: Pairing> PCUniversalParams for UniversalParams<E> {
     fn max_degree(&self) -> usize {
         self.powers_of_g.len() - 1
     }
 }
 
-impl<E: PairingEngine> CanonicalSerialize for UniversalParams<E> {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize(&mut writer)?;
-        self.powers_of_gamma_g.serialize(&mut writer)?;
-        self.h.serialize(&mut writer)?;
-        self.beta_h.serialize(&mut writer)?;
-        self.neg_powers_of_h.serialize(&mut writer)
-    }
-
-    fn serialized_size(&self) -> usize {
-        self.powers_of_g.serialized_size()
-            + self.powers_of_gamma_g.serialized_size()
-            + self.h.serialized_size()
-            + self.beta_h.serialized_size()
-            + self.neg_powers_of_h.serialized_size()
-    }
-
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize_unchecked(&mut writer)?;
-        self.powers_of_gamma_g.serialize_unchecked(&mut writer)?;
-        self.h.serialize_unchecked(&mut writer)?;
-        self.beta_h.serialize_unchecked(&mut writer)?;
-        self.neg_powers_of_h.serialize_unchecked(&mut writer)
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize_uncompressed(&mut writer)?;
-        self.powers_of_gamma_g.serialize_uncompressed(&mut writer)?;
-        self.h.serialize_uncompressed(&mut writer)?;
-        self.beta_h.serialize_uncompressed(&mut writer)?;
-        self.neg_powers_of_h.serialize_uncompressed(&mut writer)
-    }
-
-    fn uncompressed_size(&self) -> usize {
-        self.powers_of_g.uncompressed_size()
-            + self.powers_of_gamma_g.uncompressed_size()
-            + self.h.uncompressed_size()
-            + self.beta_h.uncompressed_size()
-            + self.neg_powers_of_h.uncompressed_size()
+impl<E: Pairing> CanonicalSerialize for UniversalParams<E> {
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
+        self.powers_of_g
+            .serialize_with_mode(&mut writer, compress)?;
+        self.powers_of_gamma_g
+            .serialize_with_mode(&mut writer, compress)?;
+        self.h.serialize_with_mode(&mut writer, compress)?;
+        self.beta_h.serialize_with_mode(&mut writer, compress)?;
+        self.neg_powers_of_h
+            .serialize_with_mode(&mut writer, compress)
+    }
+
+    fn serialized_size(&self, compress: Compress) -> usize {
+        self.powers_of_g.serialized_size(compress)
+            + self.powers_of_gamma_g.serialized_size(compress)
+            + self.h.serialized_size(compress)
+            + self.beta_h.serialized_size(compress)
+            + self.neg_powers_of_h.serialized_size(compress)
     }
 }
 
-impl<E: PairingEngine> CanonicalDeserialize for UniversalParams<E> {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = Vec::<E::G1Affine>::deserialize(&mut reader)?;
-        let powers_of_gamma_g = BTreeMap::<usize, E::G1Affine>::deserialize(&mut reader)?;
-        let h = E::G2Affine::deserialize(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize(&mut reader)?;
-        let neg_powers_of_h = BTreeMap::<usize, E::G2Affine>::deserialize(&mut reader)?;
-
-        let prepared_h = E::G2Prepared::from(h.clone());
-        let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
-
-        Ok(Self {
-            powers_of_g,
-            powers_of_gamma_g,
-            h,
-            beta_h,
-            neg_powers_of_h,
-            prepared_h,
-            prepared_beta_h,
-        })
-    }
-
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = Vec::<E::G1Affine>::deserialize_uncompressed(&mut reader)?;
+impl<E: Pairing> CanonicalDeserialize for UniversalParams<E> {
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let powers_of_g = Vec::deserialize_with_mode(&mut reader, compress, Validate::No)?;
         let powers_of_gamma_g =
-            BTreeMap::<usize, E::G1Affine>::deserialize_uncompressed(&mut reader)?;
-        let h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
-        let neg_powers_of_h =
-            BTreeMap::<usize, E::G2Affine>::deserialize_uncompressed(&mut reader)?;
+            BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let neg_powers_of_h = BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?;
 
         let prepared_h = E::G2Prepared::from(h.clone());
         let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
-
-        Ok(Self {
+        let result = Self {
             powers_of_g,
             powers_of_gamma_g,
             h,
@@ -126,28 +104,12 @@ impl<E: PairingEngine> CanonicalDeserialize for UniversalParams<E> {
             neg_powers_of_h,
             prepared_h,
             prepared_beta_h,
-        })
-    }
-
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = Vec::<E::G1Affine>::deserialize_unchecked(&mut reader)?;
-        let powers_of_gamma_g = BTreeMap::<usize, E::G1Affine>::deserialize_unchecked(&mut reader)?;
-        let h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let neg_powers_of_h = BTreeMap::<usize, E::G2Affine>::deserialize_unchecked(&mut reader)?;
-
-        let prepared_h = E::G2Prepared::from(h.clone());
-        let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
+        };
+        if let Validate::Yes = validate {
+            result.check()?;
+        }
 
-        Ok(Self {
-            powers_of_g,
-            powers_of_gamma_g,
-            h,
-            beta_h,
-            neg_powers_of_h,
-            prepared_h,
-            prepared_beta_h,
-        })
+        Ok(result)
     }
 }
 
@@ -161,67 +123,58 @@ impl<E: PairingEngine> CanonicalDeserialize for UniversalParams<E> {
     Debug(bound = ""),
     PartialEq
 )]
-pub struct Powers<'a, E: PairingEngine> {
+pub struct Powers<'a, E: Pairing> {
     /// Group elements of the form `β^i G`, for different values of `i`.
     pub powers_of_g: Cow<'a, [E::G1Affine]>,
     /// Group elements of the form `β^i γG`, for different values of `i`.
     pub powers_of_gamma_g: Cow<'a, [E::G1Affine]>,
 }
 
-impl<E: PairingEngine> Powers<'_, E> {
+impl<E: Pairing> Powers<'_, E> {
     /// The number of powers in `self`.
     pub fn size(&self) -> usize {
         self.powers_of_g.len()
     }
 }
-
-impl<'a, E: PairingEngine> CanonicalSerialize for Powers<'a, E> {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize(&mut writer)?;
-        self.powers_of_gamma_g.serialize(&mut writer)
-    }
-
-    fn serialized_size(&self) -> usize {
-        self.powers_of_g.serialized_size() + self.powers_of_gamma_g.serialized_size()
-    }
-
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize_unchecked(&mut writer)?;
-        self.powers_of_gamma_g.serialize_unchecked(&mut writer)
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize_uncompressed(&mut writer)?;
-        self.powers_of_gamma_g.serialize_uncompressed(&mut writer)
+impl<'a, E: Pairing> Valid for Powers<'a, E> {
+    fn check(&self) -> Result<(), SerializationError> {
+        Ok(())
     }
 }
-
-impl<'a, E: PairingEngine> CanonicalDeserialize for Powers<'a, E> {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = Vec::<E::G1Affine>::deserialize(&mut reader)?;
-        let powers_of_gamma_g = Vec::<E::G1Affine>::deserialize(&mut reader)?;
-        Ok(Self {
-            powers_of_g: Cow::Owned(powers_of_g),
-            powers_of_gamma_g: Cow::Owned(powers_of_gamma_g),
-        })
-    }
-
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = Vec::<E::G1Affine>::deserialize_unchecked(&mut reader)?;
-        let powers_of_gamma_g = Vec::<E::G1Affine>::deserialize_unchecked(&mut reader)?;
-        Ok(Self {
-            powers_of_g: Cow::Owned(powers_of_g),
-            powers_of_gamma_g: Cow::Owned(powers_of_gamma_g),
-        })
+impl<'a, E: Pairing> CanonicalSerialize for Powers<'a, E> {
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
+        self.powers_of_g
+            .serialize_with_mode(&mut writer, compress)?;
+        self.powers_of_gamma_g
+            .serialize_with_mode(&mut writer, compress)
+    }
+
+    fn serialized_size(&self, compress: Compress) -> usize {
+        self.powers_of_g.serialized_size(compress)
+            + self.powers_of_gamma_g.serialized_size(compress)
     }
+}
 
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = Vec::<E::G1Affine>::deserialize_uncompressed(&mut reader)?;
-        let powers_of_gamma_g = Vec::<E::G1Affine>::deserialize_uncompressed(&mut reader)?;
-        Ok(Self {
+impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> {
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let powers_of_g = Vec::deserialize_with_mode(&mut reader, compress, validate)?;
+        let powers_of_gamma_g = Vec::deserialize_with_mode(&mut reader, compress, validate)?;
+        let result = Self {
             powers_of_g: Cow::Owned(powers_of_g),
             powers_of_gamma_g: Cow::Owned(powers_of_gamma_g),
-        })
+        };
+        if let Validate::Yes = validate {
+            result.check()?;
+        }
+        Ok(result)
     }
 }
 /// `VerifierKey` is used to check evaluation proofs for a given commitment.
@@ -233,7 +186,7 @@ impl<'a, E: PairingEngine> CanonicalDeserialize for Powers<'a, E> {
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct VerifierKey<E: PairingEngine> {
+pub struct VerifierKey<E: Pairing> {
     /// The generator of G1.
     pub g: E::G1Affine,
     /// The generator of G1 that is used for making a commitment hiding.
@@ -250,108 +203,72 @@ pub struct VerifierKey<E: PairingEngine> {
     pub prepared_beta_h: E::G2Prepared,
 }
 
-impl<E: PairingEngine> CanonicalSerialize for VerifierKey<E> {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize(&mut writer)?;
-        self.gamma_g.serialize(&mut writer)?;
-        self.h.serialize(&mut writer)?;
-        self.beta_h.serialize(&mut writer)
-    }
+impl<E: Pairing> Valid for VerifierKey<E> {
+    fn check(&self) -> Result<(), SerializationError> {
+        self.g.check()?;
+        self.gamma_g.check()?;
+        self.h.check()?;
+        self.beta_h.check()?;
 
-    fn serialized_size(&self) -> usize {
-        self.g.serialized_size()
-            + self.gamma_g.serialized_size()
-            + self.h.serialized_size()
-            + self.beta_h.serialized_size()
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize_uncompressed(&mut writer)?;
-        self.gamma_g.serialize_uncompressed(&mut writer)?;
-        self.h.serialize_uncompressed(&mut writer)?;
-        self.beta_h.serialize_uncompressed(&mut writer)
+        Ok(())
     }
+}
 
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize_unchecked(&mut writer)?;
-        self.gamma_g.serialize_unchecked(&mut writer)?;
-        self.h.serialize_unchecked(&mut writer)?;
-        self.beta_h.serialize_unchecked(&mut writer)
+impl<E: Pairing> CanonicalSerialize for VerifierKey<E> {
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
+        self.g.serialize_with_mode(&mut writer, compress)?;
+        self.gamma_g.serialize_with_mode(&mut writer, compress)?;
+        self.h.serialize_with_mode(&mut writer, compress)?;
+        self.beta_h.serialize_with_mode(&mut writer, compress)
     }
 
-    fn uncompressed_size(&self) -> usize {
-        self.g.uncompressed_size()
-            + self.gamma_g.uncompressed_size()
-            + self.h.uncompressed_size()
-            + self.beta_h.uncompressed_size()
+    fn serialized_size(&self, compress: Compress) -> usize {
+        self.g.serialized_size(compress)
+            + self.gamma_g.serialized_size(compress)
+            + self.h.serialized_size(compress)
+            + self.beta_h.serialized_size(compress)
     }
 }
 
-impl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize(&mut reader)?;
-        let h = E::G2Affine::deserialize(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize(&mut reader)?;
-
-        let prepared_h = E::G2Prepared::from(h.clone());
-        let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
-
-        Ok(Self {
-            g,
-            gamma_g,
-            h,
-            beta_h,
-            prepared_h,
-            prepared_beta_h,
-        })
-    }
-
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
+impl<E: Pairing> CanonicalDeserialize for VerifierKey<E> {
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
 
         let prepared_h = E::G2Prepared::from(h.clone());
         let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
-
-        Ok(Self {
+        let result = Self {
             g,
             gamma_g,
             h,
             beta_h,
             prepared_h,
             prepared_beta_h,
-        })
-    }
-
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-
-        let prepared_h = E::G2Prepared::from(h.clone());
-        let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
+        };
+        if let Validate::Yes = validate {
+            result.check()?;
+        }
 
-        Ok(Self {
-            g,
-            gamma_g,
-            h,
-            beta_h,
-            prepared_h,
-            prepared_beta_h,
-        })
+        Ok(result)
     }
 }
 
-impl<E: PairingEngine> ToConstraintField<<E::Fq as Field>::BasePrimeField> for VerifierKey<E>
+impl<E: Pairing> ToConstraintField<<E::TargetField as Field>::BasePrimeField> for VerifierKey<E>
 where
-    E::G1Affine: ToConstraintField<<E::Fq as Field>::BasePrimeField>,
-    E::G2Affine: ToConstraintField<<E::Fq as Field>::BasePrimeField>,
+    E::G1Affine: ToConstraintField<<E::TargetField as Field>::BasePrimeField>,
+    E::G2Affine: ToConstraintField<<E::TargetField as Field>::BasePrimeField>,
 {
-    fn to_field_elements(&self) -> Option<Vec<<E::Fq as Field>::BasePrimeField>> {
+    fn to_field_elements(&self) -> Option<Vec<<E::TargetField as Field>::BasePrimeField>> {
         let mut res = Vec::new();
 
         res.extend_from_slice(&self.g.to_field_elements().unwrap());
@@ -367,7 +284,7 @@ where
 /// We omit gamma here for simplicity.
 #[derive(Derivative)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
-pub struct PreparedVerifierKey<E: PairingEngine> {
+pub struct PreparedVerifierKey<E: Pairing> {
     /// The generator of G1, prepared for power series.
     pub prepared_g: Vec<E::G1Affine>,
     /// The generator of G2, prepared for use in pairings.
@@ -376,13 +293,13 @@ pub struct PreparedVerifierKey<E: PairingEngine> {
     pub prepared_beta_h: E::G2Prepared,
 }
 
-impl<E: PairingEngine> PreparedVerifierKey<E> {
+impl<E: Pairing> PreparedVerifierKey<E> {
     /// prepare `PreparedVerifierKey` from `VerifierKey`
     pub fn prepare(vk: &VerifierKey<E>) -> Self {
-        let supported_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
 
         let mut prepared_g = Vec::<E::G1Affine>::new();
-        let mut g = E::G1Projective::from(vk.g.clone());
+        let mut g = E::G1::from(vk.g.clone());
         for _ in 0..supported_bits {
             prepared_g.push(g.clone().into());
             g.double_in_place();
@@ -407,12 +324,12 @@ impl<E: PairingEngine> PreparedVerifierKey<E> {
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct Commitment<E: PairingEngine>(
+pub struct Commitment<E: Pairing>(
     /// The commitment is a group element.
     pub E::G1Affine,
 );
 
-impl<E: PairingEngine> PCCommitment for Commitment<E> {
+impl<E: Pairing> PCCommitment for Commitment<E> {
     #[inline]
     fn empty() -> Self {
         Commitment(E::G1Affine::zero())
@@ -423,20 +340,20 @@ impl<E: PairingEngine> PCCommitment for Commitment<E> {
     }
 }
 
-impl<E: PairingEngine> ToConstraintField<<E::Fq as Field>::BasePrimeField> for Commitment<E>
+impl<E: Pairing> ToConstraintField<<E::TargetField as Field>::BasePrimeField> for Commitment<E>
 where
-    E::G1Affine: ToConstraintField<<E::Fq as Field>::BasePrimeField>,
+    E::G1Affine: ToConstraintField<<E::TargetField as Field>::BasePrimeField>,
 {
-    fn to_field_elements(&self) -> Option<Vec<<E::Fq as Field>::BasePrimeField>> {
+    fn to_field_elements(&self) -> Option<Vec<<E::TargetField as Field>::BasePrimeField>> {
         self.0.to_field_elements()
     }
 }
 
-impl<'a, E: PairingEngine> AddAssign<(E::Fr, &'a Commitment<E>)> for Commitment<E> {
+impl<'a, E: Pairing> AddAssign<(E::ScalarField, &'a Commitment<E>)> for Commitment<E> {
     #[inline]
-    fn add_assign(&mut self, (f, other): (E::Fr, &'a Commitment<E>)) {
+    fn add_assign(&mut self, (f, other): (E::ScalarField, &'a Commitment<E>)) {
         let mut other = other.0 * f;
-        other.add_assign_mixed(&self.0);
+        other.add_assign(&self.0);
         self.0 = other.into();
     }
 }
@@ -451,18 +368,18 @@ impl<'a, E: PairingEngine> AddAssign<(E::Fr, &'a Commitment<E>)> for Commitment<
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct PreparedCommitment<E: PairingEngine>(
+pub struct PreparedCommitment<E: Pairing>(
     /// The commitment is a group element.
     pub Vec<E::G1Affine>,
 );
 
-impl<E: PairingEngine> PreparedCommitment<E> {
+impl<E: Pairing> PreparedCommitment<E> {
     /// prepare `PreparedCommitment` from `Commitment`
     pub fn prepare(comm: &Commitment<E>) -> Self {
         let mut prepared_comm = Vec::<E::G1Affine>::new();
-        let mut cur = E::G1Projective::from(comm.0.clone());
+        let mut cur = E::G1::from(comm.0.clone());
 
-        let supported_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
 
         for _ in 0..supported_bits {
             prepared_comm.push(cur.clone().into());
@@ -570,12 +487,10 @@ impl<'a, F: PrimeField, P: DenseUVPolynomial<F>> AddAssign<(F, &'a Randomness<F,
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct Proof<E: PairingEngine> {
+pub struct Proof<E: Pairing> {
     /// This is a commitment to the witness polynomial; see [KZG10] for more details.
     pub w: E::G1Affine,
     /// This is the evaluation of the random polynomial at the point for which
     /// the evaluation proof was produced.
-    pub random_v: Option<E::Fr>,
+    pub random_v: Option<E::ScalarField>,
 }
-
-impl<E: PairingEngine> PCProof for Proof<E> {}
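
The hand-written serialization impls above target the reworked ark-serialize 0.4 surface, where the former compressed/uncompressed/unchecked method family collapses into `serialize_with_mode` / `deserialize_with_mode` plus a `Valid` check. A small generic round-trip sketch (hypothetical helper, assuming a std environment):

use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};

// Serialize compressed, then deserialize with validation, using the 0.4 entry points.
fn roundtrip<T: CanonicalSerialize + CanonicalDeserialize>(value: &T) -> T {
    let mut bytes = Vec::new();
    value
        .serialize_with_mode(&mut bytes, Compress::Yes)
        .expect("serialization failed");
    T::deserialize_with_mode(&bytes[..], Compress::Yes, Validate::Yes)
        .expect("deserialization failed")
}
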
diff --git a/src/kzg10/mod.rs b/src/kzg10/mod.rs
index 33fcdb1..4c52390 100644
--- a/src/kzg10/mod.rs
+++ b/src/kzg10/mod.rs
@@ -6,8 +6,9 @@
 //! This construction achieves extractability in the algebraic group model (AGM).
 
 use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec};
-use ark_ec::msm::{FixedBase, VariableBaseMSM};
-use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve};
+use ark_ec::AffineRepr;
+use ark_ec::{pairing::Pairing, CurveGroup};
+use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM};
 use ark_ff::{One, PrimeField, UniformRand, Zero};
 use ark_poly::DenseUVPolynomial;
 use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec};
@@ -23,15 +24,15 @@ pub use data_structures::*;
 /// [Kate, Zaverucha and Goldbgerg][kzg10]
 ///
 /// [kzg10]: http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf
-pub struct KZG10<E: PairingEngine, P: DenseUVPolynomial<E::Fr>> {
+pub struct KZG10<E: Pairing, P: DenseUVPolynomial<E::ScalarField>> {
     _engine: PhantomData<E>,
     _poly: PhantomData<P>,
 }
 
 impl<E, P> KZG10<E, P>
 where
-    E: PairingEngine,
-    P: DenseUVPolynomial<E::Fr, Point = E::Fr>,
+    E: Pairing,
+    P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
     for<'a, 'b> &'a P: Div<&'b P, Output = P>,
 {
     /// Constructs public parameters when given as input the maximum degree `degree`
@@ -45,12 +46,12 @@ where
             return Err(Error::DegreeIsZero);
         }
         let setup_time = start_timer!(|| format!("KZG10::Setup with degree {}", max_degree));
-        let beta = E::Fr::rand(rng);
-        let g = E::G1Projective::rand(rng);
-        let gamma_g = E::G1Projective::rand(rng);
-        let h = E::G2Projective::rand(rng);
+        let beta = E::ScalarField::rand(rng);
+        let g = E::G1::rand(rng);
+        let gamma_g = E::G1::rand(rng);
+        let h = E::G2::rand(rng);
 
-        let mut powers_of_beta = vec![E::Fr::one()];
+        let mut powers_of_beta = vec![E::ScalarField::one()];
 
         let mut cur = beta;
         for _ in 0..max_degree {
@@ -60,50 +61,45 @@ where
 
         let window_size = FixedBase::get_mul_window_size(max_degree + 1);
 
-        let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
         let g_time = start_timer!(|| "Generating powers of G");
         let g_table = FixedBase::get_window_table(scalar_bits, window_size, g);
         let powers_of_g =
-            FixedBase::msm::<E::G1Projective>(scalar_bits, window_size, &g_table, &powers_of_beta);
+            FixedBase::msm::<E::G1>(scalar_bits, window_size, &g_table, &powers_of_beta);
         end_timer!(g_time);
         let gamma_g_time = start_timer!(|| "Generating powers of gamma * G");
         let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g);
-        let mut powers_of_gamma_g = FixedBase::msm::<E::G1Projective>(
-            scalar_bits,
-            window_size,
-            &gamma_g_table,
-            &powers_of_beta,
-        );
+        let mut powers_of_gamma_g =
+            FixedBase::msm::<E::G1>(scalar_bits, window_size, &gamma_g_table, &powers_of_beta);
         // Add an additional power of gamma_g, because we want to be able to support
         // up to D queries.
         powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta));
         end_timer!(gamma_g_time);
 
-        let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g);
-        let powers_of_gamma_g =
-            E::G1Projective::batch_normalization_into_affine(&powers_of_gamma_g)
-                .into_iter()
-                .enumerate()
-                .collect();
+        let powers_of_g = E::G1::normalize_batch(&powers_of_g);
+        let powers_of_gamma_g = E::G1::normalize_batch(&powers_of_gamma_g)
+            .into_iter()
+            .enumerate()
+            .collect();
 
         let neg_powers_of_h_time = start_timer!(|| "Generating negative powers of h in G2");
         let neg_powers_of_h = if produce_g2_powers {
-            let mut neg_powers_of_beta = vec![E::Fr::one()];
-            let mut cur = E::Fr::one() / &beta;
+            let mut neg_powers_of_beta = vec![E::ScalarField::one()];
+            let mut cur = E::ScalarField::one() / &beta;
             for _ in 0..max_degree {
                 neg_powers_of_beta.push(cur);
                 cur /= &beta;
             }
 
             let neg_h_table = FixedBase::get_window_table(scalar_bits, window_size, h);
-            let neg_powers_of_h = FixedBase::msm::<E::G2Projective>(
+            let neg_powers_of_h = FixedBase::msm::<E::G2>(
                 scalar_bits,
                 window_size,
                 &neg_h_table,
                 &neg_powers_of_beta,
             );
 
-            let affines = E::G2Projective::batch_normalization_into_affine(&neg_powers_of_h);
+            let affines = E::G2::normalize_batch(&neg_powers_of_h);
             let mut affines_map = BTreeMap::new();
             affines.into_iter().enumerate().for_each(|(i, a)| {
                 affines_map.insert(i, a);
@@ -139,7 +135,7 @@ where
         polynomial: &P,
         hiding_bound: Option<usize>,
         rng: Option<&mut dyn RngCore>,
-    ) -> Result<(Commitment<E>, Randomness<E::Fr, P>), Error> {
+    ) -> Result<(Commitment<E>, Randomness<E::ScalarField, P>), Error> {
         Self::check_degree_is_too_large(polynomial.degree(), powers.size())?;
 
         let commit_time = start_timer!(|| format!(
@@ -152,13 +148,13 @@ where
             skip_leading_zeros_and_convert_to_bigints(polynomial);
 
         let msm_time = start_timer!(|| "MSM to compute commitment to plaintext poly");
-        let mut commitment = <E::G1Projective as VariableBaseMSM>::msm_bigint(
+        let mut commitment = <E::G1 as VariableBaseMSM>::msm_bigint(
             &powers.powers_of_g[num_leading_zeros..],
             &plain_coeffs,
         );
         end_timer!(msm_time);
 
-        let mut randomness = Randomness::<E::Fr, P>::empty();
+        let mut randomness = Randomness::<E::ScalarField, P>::empty();
         if let Some(hiding_degree) = hiding_bound {
             let mut rng = rng.ok_or(Error::MissingRng)?;
             let sample_random_poly_time = start_timer!(|| format!(
@@ -176,14 +172,14 @@ where
 
         let random_ints = convert_to_bigints(&randomness.blinding_polynomial.coeffs());
         let msm_time = start_timer!(|| "MSM to compute commitment to random poly");
-        let random_commitment = <E::G1Projective as VariableBaseMSM>::msm_bigint(
+        let random_commitment = <E::G1 as VariableBaseMSM>::msm_bigint(
             &powers.powers_of_gamma_g,
             random_ints.as_slice(),
         )
         .into_affine();
         end_timer!(msm_time);
 
-        commitment.add_assign_mixed(&random_commitment);
+        commitment += &random_commitment;
 
         end_timer!(commit_time);
         Ok((Commitment(commitment.into()), randomness))
@@ -197,9 +193,9 @@ where
     pub fn compute_witness_polynomial(
         p: &P,
         point: P::Point,
-        randomness: &Randomness<E::Fr, P>,
+        randomness: &Randomness<E::ScalarField, P>,
     ) -> Result<(P, Option<P>), Error> {
-        let divisor = P::from_coefficients_vec(vec![-point, E::Fr::one()]);
+        let divisor = P::from_coefficients_vec(vec![-point, E::ScalarField::one()]);
 
         let witness_time = start_timer!(|| "Computing witness polynomial");
         let witness_polynomial = p / &divisor;
@@ -222,7 +218,7 @@ where
     pub(crate) fn open_with_witness_polynomial<'a>(
         powers: &Powers<E>,
         point: P::Point,
-        randomness: &Randomness<E::Fr, P>,
+        randomness: &Randomness<E::ScalarField, P>,
         witness_polynomial: &P,
         hiding_witness_polynomial: Option<&P>,
     ) -> Result<Proof<E>, Error> {
@@ -231,7 +227,7 @@ where
             skip_leading_zeros_and_convert_to_bigints(witness_polynomial);
 
         let witness_comm_time = start_timer!(|| "Computing commitment to witness polynomial");
-        let mut w = <E::G1Projective as VariableBaseMSM>::msm_bigint(
+        let mut w = <E::G1 as VariableBaseMSM>::msm_bigint(
             &powers.powers_of_g[num_leading_zeros..],
             &witness_coeffs,
         );
@@ -246,7 +242,7 @@ where
             let random_witness_coeffs = convert_to_bigints(&hiding_witness_polynomial.coeffs());
             let witness_comm_time =
                 start_timer!(|| "Computing commitment to random witness polynomial");
-            w += &<E::G1Projective as VariableBaseMSM>::msm_bigint(
+            w += &<E::G1 as VariableBaseMSM>::msm_bigint(
                 &powers.powers_of_gamma_g,
                 &random_witness_coeffs,
             );
@@ -267,7 +263,7 @@ where
         powers: &Powers<E>,
         p: &P,
         point: P::Point,
-        rand: &Randomness<E::Fr, P>,
+        rand: &Randomness<E::ScalarField, P>,
     ) -> Result<Proof<E>, Error> {
         Self::check_degree_is_too_large(p.degree(), powers.size())?;
         let open_time = start_timer!(|| format!("Opening polynomial of degree {}", p.degree()));
@@ -293,18 +289,18 @@ where
     pub fn check(
         vk: &VerifierKey<E>,
         comm: &Commitment<E>,
-        point: E::Fr,
-        value: E::Fr,
+        point: E::ScalarField,
+        value: E::ScalarField,
         proof: &Proof<E>,
     ) -> Result<bool, Error> {
         let check_time = start_timer!(|| "Checking evaluation");
-        let mut inner = comm.0.into_projective() - &vk.g.mul(value);
+        let mut inner = comm.0.into_group() - &vk.g.mul(value);
         if let Some(random_v) = proof.random_v {
             inner -= &vk.gamma_g.mul(random_v);
         }
         let lhs = E::pairing(inner, vk.h);
 
-        let inner = vk.beta_h.into_projective() - &vk.h.mul(point);
+        let inner = vk.beta_h.into_group() - &vk.h.mul(point);
         let rhs = E::pairing(proof.w, inner);
 
         end_timer!(check_time, || format!("Result: {}", lhs == rhs));
@@ -316,27 +312,27 @@ where
     pub fn batch_check<R: RngCore>(
         vk: &VerifierKey<E>,
         commitments: &[Commitment<E>],
-        points: &[E::Fr],
-        values: &[E::Fr],
+        points: &[E::ScalarField],
+        values: &[E::ScalarField],
         proofs: &[Proof<E>],
         rng: &mut R,
     ) -> Result<bool, Error> {
         let check_time =
             start_timer!(|| format!("Checking {} evaluation proofs", commitments.len()));
 
-        let mut total_c = <E::G1Projective>::zero();
-        let mut total_w = <E::G1Projective>::zero();
+        let mut total_c = <E::G1>::zero();
+        let mut total_w = <E::G1>::zero();
 
         let combination_time = start_timer!(|| "Combining commitments and proofs");
-        let mut randomizer = E::Fr::one();
+        let mut randomizer = E::ScalarField::one();
         // Instead of multiplying g and gamma_g in each turn, we simply accumulate
         // their coefficients and perform a final multiplication at the end.
-        let mut g_multiplier = E::Fr::zero();
-        let mut gamma_g_multiplier = E::Fr::zero();
+        let mut g_multiplier = E::ScalarField::zero();
+        let mut gamma_g_multiplier = E::ScalarField::zero();
         for (((c, z), v), proof) in commitments.iter().zip(points).zip(values).zip(proofs) {
             let w = proof.w;
             let mut temp = w.mul(*z);
-            temp.add_assign_mixed(&c.0);
+            temp += &c.0;
             let c = temp;
             g_multiplier += &(randomizer * v);
             if let Some(random_v) = proof.random_v {
@@ -353,15 +349,16 @@ where
         end_timer!(combination_time);
 
         let to_affine_time = start_timer!(|| "Converting results to affine for pairing");
-        let affine_points = E::G1Projective::batch_normalization_into_affine(&[-total_w, total_c]);
+        let affine_points = E::G1::normalize_batch(&[-total_w, total_c]);
         let (total_w, total_c) = (affine_points[0], affine_points[1]);
         end_timer!(to_affine_time);
 
         let pairing_time = start_timer!(|| "Performing product of pairings");
-        let result = E::product_of_pairings(&[
-            (total_w.into(), vk.prepared_beta_h.clone()),
-            (total_c.into(), vk.prepared_h.clone()),
-        ])
+        let result = E::multi_pairing(
+            [total_w, total_c],
+            [vk.prepared_beta_h.clone(), vk.prepared_h.clone()],
+        )
+        .0
         .is_one();
         end_timer!(pairing_time);
         end_timer!(check_time, || format!("Result: {}", result));
@@ -403,7 +400,7 @@ where
         supported_degree: usize,
         max_degree: usize,
         enforced_degree_bounds: Option<&[usize]>,
-        p: &'a LabeledPolynomial<E::Fr, P>,
+        p: &'a LabeledPolynomial<E::ScalarField, P>,
     ) -> Result<(), Error> {
         if let Some(bound) = p.degree_bound() {
             let enforced_degree_bounds =
@@ -456,15 +453,15 @@ mod tests {
     use ark_bls12_377::Bls12_377;
     use ark_bls12_381::Bls12_381;
     use ark_bls12_381::Fr;
-    use ark_ec::PairingEngine;
+    use ark_ec::pairing::Pairing;
     use ark_poly::univariate::DensePolynomial as DensePoly;
     use ark_std::test_rng;
 
-    type UniPoly_381 = DensePoly<<Bls12_381 as PairingEngine>::Fr>;
-    type UniPoly_377 = DensePoly<<Bls12_377 as PairingEngine>::Fr>;
+    type UniPoly_381 = DensePoly<<Bls12_381 as Pairing>::ScalarField>;
+    type UniPoly_377 = DensePoly<<Bls12_377 as Pairing>::ScalarField>;
     type KZG_Bls12_381 = KZG10<Bls12_381, UniPoly_381>;
 
-    impl<E: PairingEngine, P: DenseUVPolynomial<E::Fr>> KZG10<E, P> {
+    impl<E: Pairing, P: DenseUVPolynomial<E::ScalarField>> KZG10<E, P> {
         /// Specializes the public parameters for a given maximum degree `d` for polynomials;
         /// `d` should be less than `pp.max_degree()`.
         pub(crate) fn trim(
@@ -524,8 +521,8 @@ mod tests {
 
     fn end_to_end_test_template<E, P>() -> Result<(), Error>
     where
-        E: PairingEngine,
-        P: DenseUVPolynomial<E::Fr, Point = E::Fr>,
+        E: Pairing,
+        P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
         for<'a, 'b> &'a P: Div<&'b P, Output = P>,
     {
         let rng = &mut test_rng();
@@ -539,7 +536,7 @@ mod tests {
             let p = P::rand(degree, rng);
             let hiding_bound = Some(1);
             let (comm, rand) = KZG10::<E, P>::commit(&ck, &p, hiding_bound, Some(rng))?;
-            let point = E::Fr::rand(rng);
+            let point = E::ScalarField::rand(rng);
             let value = p.evaluate(&point);
             let proof = KZG10::<E, P>::open(&ck, &p, point, &rand)?;
             assert!(
@@ -555,8 +552,8 @@ mod tests {
 
     fn linear_polynomial_test_template<E, P>() -> Result<(), Error>
     where
-        E: PairingEngine,
-        P: DenseUVPolynomial<E::Fr, Point = E::Fr>,
+        E: Pairing,
+        P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
         for<'a, 'b> &'a P: Div<&'b P, Output = P>,
     {
         let rng = &mut test_rng();
@@ -567,7 +564,7 @@ mod tests {
             let p = P::rand(1, rng);
             let hiding_bound = Some(1);
             let (comm, rand) = KZG10::<E, P>::commit(&ck, &p, hiding_bound, Some(rng))?;
-            let point = E::Fr::rand(rng);
+            let point = E::ScalarField::rand(rng);
             let value = p.evaluate(&point);
             let proof = KZG10::<E, P>::open(&ck, &p, point, &rand)?;
             assert!(
@@ -583,8 +580,8 @@ mod tests {
 
     fn batch_check_test_template<E, P>() -> Result<(), Error>
     where
-        E: PairingEngine,
-        P: DenseUVPolynomial<E::Fr, Point = E::Fr>,
+        E: Pairing,
+        P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
         for<'a, 'b> &'a P: Div<&'b P, Output = P>,
     {
         let rng = &mut test_rng();
@@ -603,7 +600,7 @@ mod tests {
                 let p = P::rand(degree, rng);
                 let hiding_bound = Some(1);
                 let (comm, rand) = KZG10::<E, P>::commit(&ck, &p, hiding_bound, Some(rng))?;
-                let point = E::Fr::rand(rng);
+                let point = E::ScalarField::rand(rng);
                 let value = p.evaluate(&point);
                 let proof = KZG10::<E, P>::open(&ck, &p, point, &rand)?;
 
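The kzg10 changes above are almost entirely renames in arkworks 0.4: `E::Fr` becomes `E::ScalarField`, `E::G1Projective` becomes `E::G1`, `batch_normalization_into_affine` becomes `CurveGroup::normalize_batch`, `add_assign_mixed` is replaced by a plain `+=` against an affine point, and `product_of_pairings` becomes `Pairing::multi_pairing`, which returns a `PairingOutput` newtype (hence the added `.0`). A minimal sketch of downstream call sites against the 0.4 spellings; the function names and inputs are illustrative only, not part of the patch:

    use ark_ec::pairing::{Pairing, PairingOutput};
    use ark_ec::{CurveGroup, VariableBaseMSM};
    use ark_ff::One;

    // was: E::G1Projective::batch_normalization_into_affine + E::product_of_pairings
    fn pairing_product_is_one<E: Pairing>(g1: &[E::G1], g2: &[E::G2Affine]) -> bool {
        let g1_affine: Vec<E::G1Affine> = E::G1::normalize_batch(g1);
        // multi_pairing returns PairingOutput<E>, a newtype over E::TargetField
        let out: PairingOutput<E> = E::multi_pairing(g1_affine, g2.iter().copied());
        out.0.is_one()
    }

    // was: <E::G1Projective as VariableBaseMSM>::msm_bigint over E::Fr coefficients
    fn msm_commit<E: Pairing>(bases: &[E::G1Affine], scalars: &[E::ScalarField]) -> E::G1 {
        // msm() is the non-bigint convenience form; it errors if the slice lengths differ
        <E::G1 as VariableBaseMSM>::msm(bases, scalars).unwrap()
    }
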
diff --git a/src/lib.rs b/src/lib.rs
index 068906b..fdbd8f0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,7 +7,7 @@
 #![deny(missing_docs)]
 #![deny(unused_imports)]
 #![deny(renamed_and_removed_lints, stable_features, unused_allocation)]
-#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)]
+#![deny(unused_comparisons, bare_trait_objects, unused_must_use)]
 #![forbid(unsafe_code)]
 
 #[allow(unused)]
@@ -25,7 +25,6 @@ use ark_std::{
     fmt::Debug,
     hash::Hash,
     iter::FromIterator,
-    rc::Rc,
     string::{String, ToString},
     vec::Vec,
 };
@@ -61,7 +60,7 @@ macro_rules! eprintln {
     () => {};
     ($($arg: tt)*) => {};
 }
-#[cfg(not(feature = "std"))]
+#[cfg(all(test, not(feature = "std")))]
 macro_rules! println {
     () => {};
     ($($arg: tt)*) => {};
@@ -109,7 +108,7 @@ pub mod challenge;
 pub mod multilinear_pc;
 
 use crate::challenge::ChallengeGenerator;
-use ark_sponge::{CryptographicSponge, FieldElementSize};
+use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
 /// Multivariate polynomial commitment based on the construction in
 /// [[PST13]][pst] with batching and (optional) hiding property inspired
 /// by the univariate scheme in [[CHMMVW20, "Marlin"]][marlin]
@@ -163,7 +162,7 @@ pub trait PolynomialCommitment<F: PrimeField, P: Polynomial<F>, S: Cryptographic
     /// The commitment randomness.
     type Randomness: PCRandomness;
     /// The evaluation proof for a single point.
-    type Proof: PCProof + Clone;
+    type Proof: Clone;
     /// The evaluation proof for a query set.
     type BatchProof: Clone
         + From<Vec<Self::Proof>>
@@ -534,8 +533,8 @@ fn lc_query_set_to_poly_query_set<'a, F: Field, T: Clone + Ord>(
 #[cfg(test)]
 pub mod tests {
     use crate::*;
+    use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge};
     use ark_poly::Polynomial;
-    use ark_sponge::poseidon::{PoseidonConfig, PoseidonSponge};
     use ark_std::rand::{
         distributions::{Distribution, Uniform},
         Rng, SeedableRng,
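The lib.rs changes swap the sponge dependency: `ark_sponge` is gone and the same types now live under `ark_crypto_primitives::sponge`. The `const_err` lint is dropped from the deny list because recent compilers no longer recognize it as a lint, which would itself trip `renamed_and_removed_lints`. A minimal sketch of the new import path; the Poseidon parameters are assumed to be supplied elsewhere, and producing them is out of scope here:

    use ark_bls12_381::Fr;
    use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge};
    use ark_crypto_primitives::sponge::CryptographicSponge;

    // was: use ark_sponge::poseidon::{PoseidonConfig, PoseidonSponge};
    fn make_sponge(params: &PoseidonConfig<Fr>) -> PoseidonSponge<Fr> {
        PoseidonSponge::new(params)
    }
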
diff --git a/src/marlin/marlin_pc/data_structures.rs b/src/marlin/marlin_pc/data_structures.rs
index 955312d..af75bdc 100644
--- a/src/marlin/marlin_pc/data_structures.rs
+++ b/src/marlin/marlin_pc/data_structures.rs
@@ -2,10 +2,10 @@ use crate::{
     DenseUVPolynomial, PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey,
     PCRandomness, PCVerifierKey, Vec,
 };
-use ark_ec::{PairingEngine, ProjectiveCurve};
+use ark_ec::pairing::Pairing;
+use ark_ec::Group;
 use ark_ff::{Field, PrimeField, ToConstraintField};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
-use ark_std::io::{Read, Write};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::ops::{Add, AddAssign};
 use ark_std::rand::RngCore;
 
@@ -22,7 +22,7 @@ pub type UniversalParams<E> = kzg10::UniversalParams<E>;
     Clone(bound = ""),
     Debug(bound = "")
 )]
-pub struct CommitterKey<E: PairingEngine> {
+pub struct CommitterKey<E: Pairing> {
     /// The key used to commit to polynomials.
     pub powers: Vec<E::G1Affine>,
 
@@ -42,7 +42,7 @@ pub struct CommitterKey<E: PairingEngine> {
     pub max_degree: usize,
 }
 
-impl<E: PairingEngine> CommitterKey<E> {
+impl<E: Pairing> CommitterKey<E> {
     /// Obtain powers for the underlying KZG10 construction
     pub fn powers<'a>(&'a self) -> kzg10::Powers<'a, E> {
         kzg10::Powers {
@@ -82,7 +82,7 @@ impl<E: PairingEngine> CommitterKey<E> {
     }
 }
 
-impl<E: PairingEngine> PCCommitterKey for CommitterKey<E> {
+impl<E: Pairing> PCCommitterKey for CommitterKey<E> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -95,7 +95,7 @@ impl<E: PairingEngine> PCCommitterKey for CommitterKey<E> {
 /// `VerifierKey` is used to check evaluation proofs for a given commitment.
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
-pub struct VerifierKey<E: PairingEngine> {
+pub struct VerifierKey<E: Pairing> {
     /// The verification key for the underlying KZG10 scheme.
     pub vk: kzg10::VerifierKey<E>,
     /// Information required to enforce degree bounds. Each pair
@@ -111,7 +111,7 @@ pub struct VerifierKey<E: PairingEngine> {
     pub supported_degree: usize,
 }
 
-impl<E: PairingEngine> VerifierKey<E> {
+impl<E: Pairing> VerifierKey<E> {
     /// Find the appropriate shift for the degree bound.
     pub fn get_shift_power(&self, bound: usize) -> Option<E::G1Affine> {
         self.degree_bounds_and_shift_powers.as_ref().and_then(|v| {
@@ -122,7 +122,7 @@ impl<E: PairingEngine> VerifierKey<E> {
     }
 }
 
-impl<E: PairingEngine> PCVerifierKey for VerifierKey<E> {
+impl<E: Pairing> PCVerifierKey for VerifierKey<E> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -132,18 +132,18 @@ impl<E: PairingEngine> PCVerifierKey for VerifierKey<E> {
     }
 }
 
-impl<E: PairingEngine> ToConstraintField<<E::Fq as Field>::BasePrimeField> for VerifierKey<E>
+impl<E: Pairing> ToConstraintField<<E::TargetField as Field>::BasePrimeField> for VerifierKey<E>
 where
-    E::G1Affine: ToConstraintField<<E::Fq as Field>::BasePrimeField>,
-    E::G2Affine: ToConstraintField<<E::Fq as Field>::BasePrimeField>,
+    E::G1Affine: ToConstraintField<<E::TargetField as Field>::BasePrimeField>,
+    E::G2Affine: ToConstraintField<<E::TargetField as Field>::BasePrimeField>,
 {
-    fn to_field_elements(&self) -> Option<Vec<<E::Fq as Field>::BasePrimeField>> {
+    fn to_field_elements(&self) -> Option<Vec<<E::TargetField as Field>::BasePrimeField>> {
         let mut res = Vec::new();
         res.extend_from_slice(&self.vk.to_field_elements().unwrap());
 
         if let Some(degree_bounds_and_shift_powers) = &self.degree_bounds_and_shift_powers {
             for (d, shift_power) in degree_bounds_and_shift_powers.iter() {
-                let d_elem: <E::Fq as Field>::BasePrimeField = (*d as u64).into();
+                let d_elem: <E::TargetField as Field>::BasePrimeField = (*d as u64).into();
 
                 res.push(d_elem);
                 res.extend_from_slice(&shift_power.to_field_elements().unwrap());
@@ -157,7 +157,7 @@ where
 /// `PreparedVerifierKey` is used to check evaluation proofs for a given commitment.
 #[derive(Derivative)]
 #[derivative(Clone(bound = ""), Debug(bound = ""))]
-pub struct PreparedVerifierKey<E: PairingEngine> {
+pub struct PreparedVerifierKey<E: Pairing> {
     /// The verification key for the underlying KZG10 scheme.
     pub prepared_vk: kzg10::PreparedVerifierKey<E>,
     /// Information required to enforce degree bounds. Each pair
@@ -172,12 +172,12 @@ pub struct PreparedVerifierKey<E: PairingEngine> {
     pub supported_degree: usize,
 }
 
-impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifierKey<E> {
+impl<E: Pairing> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifierKey<E> {
     /// prepare `PreparedVerifierKey` from `VerifierKey`
     fn prepare(vk: &VerifierKey<E>) -> Self {
         let prepared_vk = kzg10::PreparedVerifierKey::<E>::prepare(&vk.vk);
 
-        let supported_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
 
         let prepared_degree_bounds_and_shift_powers: Option<Vec<(usize, Vec<E::G1Affine>)>> =
             if vk.degree_bounds_and_shift_powers.is_some() {
@@ -189,7 +189,7 @@ impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifie
                 for (d, shift_power) in degree_bounds_and_shift_powers {
                     let mut prepared_shift_power = Vec::<E::G1Affine>::new();
 
-                    let mut cur = E::G1Projective::from(shift_power.clone());
+                    let mut cur = E::G1::from(shift_power.clone());
                     for _ in 0..supported_bits {
                         prepared_shift_power.push(cur.clone().into());
                         cur.double_in_place();
@@ -223,7 +223,7 @@ impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifie
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct Commitment<E: PairingEngine> {
+pub struct Commitment<E: Pairing> {
     /// A KZG10 commitment to the polynomial.
     pub comm: kzg10::Commitment<E>,
 
@@ -233,11 +233,11 @@ pub struct Commitment<E: PairingEngine> {
     pub shifted_comm: Option<kzg10::Commitment<E>>,
 }
 
-impl<E: PairingEngine> ToConstraintField<<E::Fq as Field>::BasePrimeField> for Commitment<E>
+impl<E: Pairing> ToConstraintField<<E::TargetField as Field>::BasePrimeField> for Commitment<E>
 where
-    E::G1Affine: ToConstraintField<<E::Fq as Field>::BasePrimeField>,
+    E::G1Affine: ToConstraintField<<E::TargetField as Field>::BasePrimeField>,
 {
-    fn to_field_elements(&self) -> Option<Vec<<E::Fq as Field>::BasePrimeField>> {
+    fn to_field_elements(&self) -> Option<Vec<<E::TargetField as Field>::BasePrimeField>> {
         let mut res = Vec::new();
         res.extend_from_slice(&self.comm.to_field_elements().unwrap());
 
@@ -249,7 +249,7 @@ where
     }
 }
 
-impl<E: PairingEngine> PCCommitment for Commitment<E> {
+impl<E: Pairing> PCCommitment for Commitment<E> {
     #[inline]
     fn empty() -> Self {
         Self {
@@ -272,12 +272,12 @@ impl<E: PairingEngine> PCCommitment for Commitment<E> {
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct PreparedCommitment<E: PairingEngine> {
+pub struct PreparedCommitment<E: Pairing> {
     pub(crate) prepared_comm: kzg10::PreparedCommitment<E>,
     pub(crate) shifted_comm: Option<kzg10::Commitment<E>>,
 }
 
-impl<E: PairingEngine> PCPreparedCommitment<Commitment<E>> for PreparedCommitment<E> {
+impl<E: Pairing> PCPreparedCommitment<Commitment<E>> for PreparedCommitment<E> {
     /// Prepare commitment to a polynomial that optionally enforces a degree bound.
     fn prepare(comm: &Commitment<E>) -> Self {
         let prepared_comm = kzg10::PreparedCommitment::<E>::prepare(&comm.comm);
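Beyond the `Pairing` rename, the data structures above now reach the constraint field through `<E::TargetField as Field>::BasePrimeField` rather than `E::Fq`, and shift powers are prepared with `E::G1` (the projective group) plus `Group::double_in_place`. A small sketch of that preparation loop in 0.4 terms; the inputs are illustrative, and `bits` would typically be `E::ScalarField::MODULUS_BIT_SIZE as usize`, as in `prepare` above:

    use ark_ec::pairing::Pairing;
    use ark_ec::{CurveGroup, Group};

    fn prepared_shift_powers<E: Pairing>(shift_power: E::G1Affine, bits: usize) -> Vec<E::G1Affine> {
        // was: E::G1Projective::from(shift_power)
        let mut cur = E::G1::from(shift_power);
        let mut out = Vec::with_capacity(bits);
        for _ in 0..bits {
            out.push(cur.into_affine()); // group elements are Copy, so `cur` stays usable
            cur.double_in_place();       // Group::double_in_place, unchanged from 0.3
        }
        out
    }
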
diff --git a/src/marlin/marlin_pc/mod.rs b/src/marlin/marlin_pc/mod.rs
index c52f279..93856b0 100644
--- a/src/marlin/marlin_pc/mod.rs
+++ b/src/marlin/marlin_pc/mod.rs
@@ -3,7 +3,9 @@ use crate::{BTreeMap, BTreeSet, ToString, Vec};
 use crate::{BatchLCProof, Error, Evaluations, QuerySet};
 use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination};
 use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment};
-use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve};
+use ark_ec::pairing::Pairing;
+use ark_ec::AffineRepr;
+use ark_ec::CurveGroup;
 use ark_ff::Zero;
 use ark_poly::DenseUVPolynomial;
 use ark_std::rand::RngCore;
@@ -11,7 +13,7 @@ use ark_std::{marker::PhantomData, ops::Div, vec};
 
 mod data_structures;
 use crate::challenge::ChallengeGenerator;
-use ark_sponge::CryptographicSponge;
+use ark_crypto_primitives::sponge::CryptographicSponge;
 pub use data_structures::*;
 
 /// Polynomial commitment based on [[KZG10]][kzg], with degree enforcement, batching,
@@ -26,13 +28,13 @@ pub use data_structures::*;
 ///
 /// [kzg]: http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf
 /// [marlin]: https://eprint.iacr.org/2019/104
-pub struct MarlinKZG10<E: PairingEngine, P: DenseUVPolynomial<E::Fr>, S: CryptographicSponge> {
+pub struct MarlinKZG10<E: Pairing, P: DenseUVPolynomial<E::ScalarField>, S: CryptographicSponge> {
     _engine: PhantomData<E>,
     _poly: PhantomData<P>,
     _sponge: PhantomData<S>,
 }
 
-pub(crate) fn shift_polynomial<E: PairingEngine, P: DenseUVPolynomial<E::Fr>>(
+pub(crate) fn shift_polynomial<E: Pairing, P: DenseUVPolynomial<E::ScalarField>>(
     ck: &CommitterKey<E>,
     p: &P,
     degree_bound: usize,
@@ -47,16 +49,16 @@ pub(crate) fn shift_polynomial<E: PairingEngine, P: DenseUVPolynomial<E::Fr>>(
         let largest_enforced_degree_bound = enforced_degree_bounds.last().unwrap();
 
         let mut shifted_polynomial_coeffs =
-            vec![E::Fr::zero(); largest_enforced_degree_bound - degree_bound];
+            vec![E::ScalarField::zero(); largest_enforced_degree_bound - degree_bound];
         shifted_polynomial_coeffs.extend_from_slice(&p.coeffs());
         P::from_coefficients_vec(shifted_polynomial_coeffs)
     }
 }
 
-impl<E, P, S> PolynomialCommitment<E::Fr, P, S> for MarlinKZG10<E, P, S>
+impl<E, P, S> PolynomialCommitment<E::ScalarField, P, S> for MarlinKZG10<E, P, S>
 where
-    E: PairingEngine,
-    P: DenseUVPolynomial<E::Fr, Point = E::Fr>,
+    E: Pairing,
+    P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
     S: CryptographicSponge,
     for<'a, 'b> &'a P: Div<&'b P, Output = P>,
 {
@@ -66,7 +68,7 @@ where
     type PreparedVerifierKey = PreparedVerifierKey<E>;
     type Commitment = Commitment<E>;
     type PreparedCommitment = PreparedCommitment<E>;
-    type Randomness = Randomness<E::Fr, P>;
+    type Randomness = Randomness<E::ScalarField, P>;
     type Proof = kzg10::Proof<E>;
     type BatchProof = Vec<Self::Proof>;
     type Error = Error;
@@ -175,7 +177,7 @@ where
     /// Outputs a commitment to `polynomial`.
     fn commit<'a>(
         ck: &Self::CommitterKey,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         rng: Option<&mut dyn RngCore>,
     ) -> Result<
         (
@@ -248,10 +250,10 @@ where
     /// On input a polynomial `p` and a point `point`, outputs a proof for the same.
     fn open<'a>(
         ck: &Self::CommitterKey,
-        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         _commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         point: &'a P::Point,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         _rng: Option<&mut dyn RngCore>,
     ) -> Result<Self::Proof, Self::Error>
@@ -312,7 +314,7 @@ where
         }
         let proof_time = start_timer!(|| "Creating proof for unshifted polynomials");
         let proof = kzg10::KZG10::open(&ck.powers(), &p, *point, &r)?;
-        let mut w = proof.w.into_projective();
+        let mut w = proof.w.into_group();
         let mut random_v = proof.random_v;
         end_timer!(proof_time);
 
@@ -327,7 +329,7 @@ where
             )?;
             end_timer!(proof_time);
 
-            w += &shifted_proof.w.into_projective();
+            w += &shifted_proof.w.into_group();
             if let Some(shifted_random_v) = shifted_proof.random_v {
                 random_v = random_v.map(|v| v + &shifted_random_v);
             }
@@ -345,9 +347,9 @@ where
         vk: &Self::VerifierKey,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         point: &'a P::Point,
-        values: impl IntoIterator<Item = E::Fr>,
+        values: impl IntoIterator<Item = E::ScalarField>,
         proof: &Self::Proof,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         _rng: Option<&mut dyn RngCore>,
     ) -> Result<bool, Self::Error>
     where
@@ -371,9 +373,9 @@ where
         vk: &Self::VerifierKey,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        values: &Evaluations<E::Fr, P::Point>,
+        values: &Evaluations<E::ScalarField, P::Point>,
         proof: &Self::BatchProof,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Self::Error>
     where
@@ -403,14 +405,14 @@ where
 
     fn open_combinations<'a>(
         ck: &Self::CommitterKey,
-        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         rng: Option<&mut dyn RngCore>,
-    ) -> Result<BatchLCProof<E::Fr, Self::BatchProof>, Self::Error>
+    ) -> Result<BatchLCProof<E::ScalarField, Self::BatchProof>, Self::Error>
     where
         P: 'a,
         Self::Randomness: 'a,
@@ -432,12 +434,12 @@ where
     /// committed in `labeled_commitments`.
     fn check_combinations<'a, R: RngCore>(
         vk: &Self::VerifierKey,
-        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
+        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        evaluations: &Evaluations<E::Fr, P::Point>,
-        proof: &BatchLCProof<E::Fr, Self::BatchProof>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        evaluations: &Evaluations<E::ScalarField, P::Point>,
+        proof: &BatchLCProof<E::ScalarField, Self::BatchProof>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Self::Error>
     where
@@ -459,10 +461,10 @@ where
     /// of the polynomials at the points in the query set.
     fn batch_open<'a>(
         ck: &CommitterKey<E>,
-        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Commitment<E>>>,
         query_set: &QuerySet<P::Point>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         rng: Option<&mut dyn RngCore>,
     ) -> Result<Vec<kzg10::Proof<E>>, Error>
@@ -538,41 +540,41 @@ mod tests {
     use super::MarlinKZG10;
     use ark_bls12_377::Bls12_377;
     use ark_bls12_381::Bls12_381;
-    use ark_ec::PairingEngine;
+    use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
+    use ark_ec::pairing::Pairing;
     use ark_ff::UniformRand;
     use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial};
-    use ark_sponge::poseidon::PoseidonSponge;
     use rand_chacha::ChaCha20Rng;
 
-    type UniPoly_381 = DensePoly<<Bls12_381 as PairingEngine>::Fr>;
-    type UniPoly_377 = DensePoly<<Bls12_377 as PairingEngine>::Fr>;
+    type UniPoly_381 = DensePoly<<Bls12_381 as Pairing>::ScalarField>;
+    type UniPoly_377 = DensePoly<<Bls12_377 as Pairing>::ScalarField>;
 
     type PC<E, P, S> = MarlinKZG10<E, P, S>;
 
-    type Sponge_Bls12_381 = PoseidonSponge<<Bls12_381 as PairingEngine>::Fr>;
-    type Sponge_Bls12_377 = PoseidonSponge<<Bls12_377 as PairingEngine>::Fr>;
+    type Sponge_Bls12_381 = PoseidonSponge<<Bls12_381 as Pairing>::ScalarField>;
+    type Sponge_Bls12_377 = PoseidonSponge<<Bls12_377 as Pairing>::ScalarField>;
 
     type PC_Bls12_381 = PC<Bls12_381, UniPoly_381, Sponge_Bls12_381>;
     type PC_Bls12_377 = PC<Bls12_377, UniPoly_377, Sponge_Bls12_377>;
 
-    fn rand_poly<E: PairingEngine>(
+    fn rand_poly<E: Pairing>(
         degree: usize,
         _: Option<usize>,
         rng: &mut ChaCha20Rng,
-    ) -> DensePoly<E::Fr> {
-        DensePoly::<E::Fr>::rand(degree, rng)
+    ) -> DensePoly<E::ScalarField> {
+        DensePoly::<E::ScalarField>::rand(degree, rng)
     }
 
-    fn constant_poly<E: PairingEngine>(
+    fn constant_poly<E: Pairing>(
         _: usize,
         _: Option<usize>,
         rng: &mut ChaCha20Rng,
-    ) -> DensePoly<E::Fr> {
-        DensePoly::<E::Fr>::from_coefficients_slice(&[E::Fr::rand(rng)])
+    ) -> DensePoly<E::ScalarField> {
+        DensePoly::<E::ScalarField>::from_coefficients_slice(&[E::ScalarField::rand(rng)])
     }
 
-    fn rand_point<E: PairingEngine>(_: Option<usize>, rng: &mut ChaCha20Rng) -> E::Fr {
-        E::Fr::rand(rng)
+    fn rand_point<E: Pairing>(_: Option<usize>, rng: &mut ChaCha20Rng) -> E::ScalarField {
+        E::ScalarField::rand(rng)
     }
 
     #[test]
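The marlin_pc changes mirror the kzg10 ones: `PairingEngine` bounds become `Pairing`, `E::Fr` becomes `E::ScalarField`, and `into_projective()` becomes `AffineRepr::into_group()`. A sketch of the witness-combination step from `open` in 0.4 spellings; the names and inputs are illustrative only:

    use ark_ec::pairing::Pairing;
    use ark_ec::{AffineRepr, CurveGroup};

    // `w` and `shifted_w` stand in for the unshifted and shifted witness commitments.
    fn combine_witnesses<E: Pairing>(w: E::G1Affine, shifted_w: E::G1Affine) -> E::G1Affine {
        let mut acc = w.into_group(); // was: w.into_projective()
        acc += &shifted_w.into_group();
        acc.into_affine()
    }
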
diff --git a/src/marlin/marlin_pst13_pc/data_structures.rs b/src/marlin/marlin_pst13_pc/data_structures.rs
index 62c9d31..8ccf300 100644
--- a/src/marlin/marlin_pst13_pc/data_structures.rs
+++ b/src/marlin/marlin_pst13_pc/data_structures.rs
@@ -1,9 +1,8 @@
 use crate::{BTreeMap, Vec};
 use crate::{
-    PCCommitterKey, PCPreparedVerifierKey, PCProof, PCRandomness, PCUniversalParams, PCVerifierKey,
+    PCCommitterKey, PCPreparedVerifierKey, PCRandomness, PCUniversalParams, PCVerifierKey,
 };
-use ark_ec::PairingEngine;
-use ark_ff::Zero;
+use ark_ec::pairing::Pairing;
 use ark_poly::DenseMVPolynomial;
 use ark_std::{
     io::{Read, Write},
@@ -11,7 +10,9 @@ use ark_std::{
     ops::{Add, AddAssign, Index},
 };
 
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_serialize::{
+    CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate,
+};
 use ark_std::rand::RngCore;
 
 /// `UniversalParams` are the universal parameters for the MarlinPST13 scheme.
@@ -19,9 +20,9 @@ use ark_std::rand::RngCore;
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
 pub struct UniversalParams<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     /// Contains group elements corresponding to all possible monomials with
     /// `num_vars` and maximum degree `max_degree` evaluated at `\beta`
@@ -48,126 +49,79 @@ where
     pub max_degree: usize,
 }
 
-impl<E, P> CanonicalSerialize for UniversalParams<E, P>
+impl<E, P> Valid for UniversalParams<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize(&mut writer)?;
-        self.gamma_g.serialize(&mut writer)?;
-        self.powers_of_gamma_g.serialize(&mut writer)?;
-        self.h.serialize(&mut writer)?;
-        self.beta_h.serialize(&mut writer)?;
-        self.num_vars.serialize(&mut writer)?;
-        self.max_degree.serialize(&mut writer)
-    }
-
-    fn serialized_size(&self) -> usize {
-        self.powers_of_g.serialized_size()
-            + self.gamma_g.serialized_size()
-            + self.powers_of_gamma_g.serialized_size()
-            + self.h.serialized_size()
-            + self.beta_h.serialized_size()
-            + self.num_vars.serialized_size()
-            + self.max_degree.serialized_size()
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize_uncompressed(&mut writer)?;
-        self.gamma_g.serialize_uncompressed(&mut writer)?;
-        self.powers_of_gamma_g.serialize_uncompressed(&mut writer)?;
-        self.h.serialize_uncompressed(&mut writer)?;
-        self.beta_h.serialize_uncompressed(&mut writer)?;
-        self.num_vars.serialize_uncompressed(&mut writer)?;
-        self.max_degree.serialize_uncompressed(&mut writer)
+    fn check(&self) -> Result<(), SerializationError> {
+        self.powers_of_g.check()?;
+        self.gamma_g.check()?;
+        self.powers_of_gamma_g.check()?;
+        self.h.check()?;
+        self.beta_h.check()?;
+        self.num_vars.check()?;
+        self.max_degree.check()?;
+        Ok(())
     }
+}
 
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.powers_of_g.serialize_unchecked(&mut writer)?;
-        self.gamma_g.serialize_unchecked(&mut writer)?;
-        self.powers_of_gamma_g.serialize_unchecked(&mut writer)?;
-        self.h.serialize_unchecked(&mut writer)?;
-        self.beta_h.serialize_unchecked(&mut writer)?;
-        self.num_vars.serialize_unchecked(&mut writer)?;
-        self.max_degree.serialize_unchecked(&mut writer)
+impl<E, P> CanonicalSerialize for UniversalParams<E, P>
+where
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
+{
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
+        self.powers_of_g
+            .serialize_with_mode(&mut writer, compress)?;
+        self.gamma_g.serialize_with_mode(&mut writer, compress)?;
+        self.powers_of_gamma_g
+            .serialize_with_mode(&mut writer, compress)?;
+        self.h.serialize_with_mode(&mut writer, compress)?;
+        self.beta_h.serialize_with_mode(&mut writer, compress)?;
+        self.num_vars.serialize_with_mode(&mut writer, compress)?;
+        self.max_degree.serialize_with_mode(&mut writer, compress)
     }
 
-    fn uncompressed_size(&self) -> usize {
-        self.powers_of_g.uncompressed_size()
-            + self.gamma_g.uncompressed_size()
-            + self.powers_of_gamma_g.uncompressed_size()
-            + self.h.uncompressed_size()
-            + self.beta_h.uncompressed_size()
-            + self.num_vars.uncompressed_size()
-            + self.max_degree.uncompressed_size()
+    fn serialized_size(&self, compress: Compress) -> usize {
+        self.powers_of_g.serialized_size(compress)
+            + self.gamma_g.serialized_size(compress)
+            + self.powers_of_gamma_g.serialized_size(compress)
+            + self.h.serialized_size(compress)
+            + self.beta_h.serialized_size(compress)
+            + self.num_vars.serialized_size(compress)
+            + self.max_degree.serialized_size(compress)
     }
 }
 
 impl<E, P> CanonicalDeserialize for UniversalParams<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = BTreeMap::<P::Term, E::G1Affine>::deserialize(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize(&mut reader)?;
-        let powers_of_gamma_g = Vec::<Vec<E::G1Affine>>::deserialize(&mut reader)?;
-        let h = E::G2Affine::deserialize(&mut reader)?;
-        let beta_h = Vec::<E::G2Affine>::deserialize(&mut reader)?;
-        let num_vars = usize::deserialize(&mut reader)?;
-        let max_degree = usize::deserialize(&mut reader)?;
-
-        let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect();
-        Ok(Self {
-            powers_of_g,
-            gamma_g,
-            powers_of_gamma_g,
-            h,
-            beta_h,
-            prepared_h: h.into(),
-            prepared_beta_h,
-            num_vars,
-            max_degree,
-        })
-    }
-
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = BTreeMap::<P::Term, E::G1Affine>::deserialize_uncompressed(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let powers_of_gamma_g = Vec::<Vec<E::G1Affine>>::deserialize_uncompressed(&mut reader)?;
-        let h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
-        let beta_h = Vec::<E::G2Affine>::deserialize_uncompressed(&mut reader)?;
-        let num_vars = usize::deserialize_uncompressed(&mut reader)?;
-        let max_degree = usize::deserialize_uncompressed(&mut reader)?;
-
-        let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect();
-        Ok(Self {
-            powers_of_g,
-            gamma_g,
-            powers_of_gamma_g,
-            h,
-            beta_h,
-            prepared_h: h.into(),
-            prepared_beta_h,
-            num_vars,
-            max_degree,
-        })
-    }
-
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let powers_of_g = BTreeMap::<P::Term, E::G1Affine>::deserialize_unchecked(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let powers_of_gamma_g = Vec::<Vec<E::G1Affine>>::deserialize_unchecked(&mut reader)?;
-        let h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let beta_h = Vec::<E::G2Affine>::deserialize_unchecked(&mut reader)?;
-        let num_vars = usize::deserialize_unchecked(&mut reader)?;
-        let max_degree = usize::deserialize_unchecked(&mut reader)?;
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let powers_of_g = BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let powers_of_gamma_g = Vec::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let beta_h =
+            Vec::<E::G2Affine>::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let num_vars = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
 
         let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect();
-        Ok(Self {
+        let result = Self {
             powers_of_g,
             gamma_g,
             powers_of_gamma_g,
@@ -177,15 +131,19 @@ where
             prepared_beta_h,
             num_vars,
             max_degree,
-        })
+        };
+        if let Validate::Yes = validate {
+            result.check()?;
+        }
+        Ok(result)
     }
 }
 
 impl<E, P> PCUniversalParams for UniversalParams<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     fn max_degree(&self) -> usize {
         self.max_degree
@@ -198,9 +156,9 @@ where
 #[derivative(Hash(bound = ""), Clone(bound = ""), Debug(bound = ""))]
 pub struct CommitterKey<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     /// Contains group elements corresponding to all possible monomials with
     /// `num_vars` and maximum degree `supported_degree` evaluated at `\beta`
@@ -222,9 +180,9 @@ where
 
 impl<E, P> PCCommitterKey for CommitterKey<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     fn max_degree(&self) -> usize {
         self.max_degree
@@ -238,7 +196,7 @@ where
 /// `VerifierKey` is used to check evaluation proofs for a given commitment.
 #[derive(Derivative)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
-pub struct VerifierKey<E: PairingEngine> {
+pub struct VerifierKey<E: Pairing> {
     /// The generator of G1.
     pub g: E::G1Affine,
     /// The generator of G1 that is used for making a commitment hiding.
@@ -263,117 +221,70 @@ pub struct VerifierKey<E: PairingEngine> {
     /// from.
     pub max_degree: usize,
 }
+impl<E: Pairing> Valid for VerifierKey<E> {
+    fn check(&self) -> Result<(), SerializationError> {
+        self.g.check()?;
+        self.gamma_g.check()?;
+        self.h.check()?;
+        self.beta_h.check()?;
+
+        if self.num_vars == 0 {
+            return Err(SerializationError::InvalidData);
+        }
+        if self.supported_degree == 0 {
+            return Err(SerializationError::InvalidData);
+        }
+        if self.max_degree == 0 || self.max_degree < self.supported_degree {
+            return Err(SerializationError::InvalidData);
+        }
 
-impl<E: PairingEngine> CanonicalSerialize for VerifierKey<E> {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize(&mut writer)?;
-        self.gamma_g.serialize(&mut writer)?;
-        self.h.serialize(&mut writer)?;
-        self.beta_h.serialize(&mut writer)?;
-        self.num_vars.serialize(&mut writer)?;
-        self.supported_degree.serialize(&mut writer)?;
-        self.max_degree.serialize(&mut writer)
-    }
-
-    fn serialized_size(&self) -> usize {
-        self.g.serialized_size()
-            + self.gamma_g.serialized_size()
-            + self.h.serialized_size()
-            + self.beta_h.serialized_size()
-            + self.num_vars.serialized_size()
-            + self.supported_degree.serialized_size()
-            + self.max_degree.serialized_size()
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize_uncompressed(&mut writer)?;
-        self.gamma_g.serialize_uncompressed(&mut writer)?;
-        self.h.serialize_uncompressed(&mut writer)?;
-        self.beta_h.serialize_uncompressed(&mut writer)?;
-        self.num_vars.serialize_uncompressed(&mut writer)?;
-        self.supported_degree.serialize_uncompressed(&mut writer)?;
-        self.max_degree.serialize_uncompressed(&mut writer)
-    }
-
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize_unchecked(&mut writer)?;
-        self.gamma_g.serialize_unchecked(&mut writer)?;
-        self.h.serialize_unchecked(&mut writer)?;
-        self.beta_h.serialize_unchecked(&mut writer)?;
-        self.num_vars.serialize_unchecked(&mut writer)?;
-        self.supported_degree.serialize_unchecked(&mut writer)?;
-        self.max_degree.serialize_unchecked(&mut writer)
-    }
-
-    fn uncompressed_size(&self) -> usize {
-        self.g.uncompressed_size()
-            + self.gamma_g.uncompressed_size()
-            + self.h.uncompressed_size()
-            + self.beta_h.uncompressed_size()
-            + self.num_vars.uncompressed_size()
-            + self.supported_degree.uncompressed_size()
-            + self.max_degree.uncompressed_size()
+        Ok(())
     }
 }
-
-impl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize(&mut reader)?;
-        let h = E::G2Affine::deserialize(&mut reader)?;
-        let beta_h = Vec::<E::G2Affine>::deserialize(&mut reader)?;
-        let num_vars = usize::deserialize(&mut reader)?;
-        let supported_degree = usize::deserialize(&mut reader)?;
-        let max_degree = usize::deserialize(&mut reader)?;
-
-        let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect();
-        Ok(Self {
-            g,
-            gamma_g,
-            h,
-            beta_h,
-            prepared_h: h.into(),
-            prepared_beta_h,
-            num_vars,
-            supported_degree,
-            max_degree,
-        })
+impl<E: Pairing> CanonicalSerialize for VerifierKey<E> {
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
+        self.g.serialize_with_mode(&mut writer, compress)?;
+        self.gamma_g.serialize_with_mode(&mut writer, compress)?;
+        self.h.serialize_with_mode(&mut writer, compress)?;
+        self.beta_h.serialize_with_mode(&mut writer, compress)?;
+        self.num_vars.serialize_with_mode(&mut writer, compress)?;
+        self.supported_degree
+            .serialize_with_mode(&mut writer, compress)?;
+        self.max_degree.serialize_with_mode(&mut writer, compress)
     }
 
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
-        let beta_h = Vec::<E::G2Affine>::deserialize_uncompressed(&mut reader)?;
-        let num_vars = usize::deserialize_uncompressed(&mut reader)?;
-        let supported_degree = usize::deserialize_uncompressed(&mut reader)?;
-        let max_degree = usize::deserialize_uncompressed(&mut reader)?;
-
-        let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect();
-        Ok(Self {
-            g,
-            gamma_g,
-            h,
-            beta_h,
-            prepared_h: h.into(),
-            prepared_beta_h,
-            num_vars,
-            supported_degree,
-            max_degree,
-        })
+    fn serialized_size(&self, compress: Compress) -> usize {
+        self.g.serialized_size(compress)
+            + self.gamma_g.serialized_size(compress)
+            + self.h.serialized_size(compress)
+            + self.beta_h.serialized_size(compress)
+            + self.num_vars.serialized_size(compress)
+            + self.supported_degree.serialized_size(compress)
+            + self.max_degree.serialized_size(compress)
     }
+}
 
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let beta_h = Vec::<E::G2Affine>::deserialize_unchecked(&mut reader)?;
-        let num_vars = usize::deserialize_unchecked(&mut reader)?;
-        let supported_degree = usize::deserialize_unchecked(&mut reader)?;
-        let max_degree = usize::deserialize_unchecked(&mut reader)?;
+impl<E: Pairing> CanonicalDeserialize for VerifierKey<E> {
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let beta_h =
+            Vec::<E::G2Affine>::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let num_vars = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let supported_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
 
         let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect();
-        Ok(Self {
+        let result = Self {
             g,
             gamma_g,
             h,
@@ -383,11 +294,15 @@ impl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {
             num_vars,
             supported_degree,
             max_degree,
-        })
+        };
+        if let Validate::Yes = validate {
+            result.check()?;
+        }
+        Ok(result)
     }
 }
 
-impl<E: PairingEngine> PCVerifierKey for VerifierKey<E> {
+impl<E: Pairing> PCVerifierKey for VerifierKey<E> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -400,7 +315,7 @@ impl<E: PairingEngine> PCVerifierKey for VerifierKey<E> {
 /// Nothing to do to prepare this verifier key (for now).
 pub type PreparedVerifierKey<E> = VerifierKey<E>;
 
-impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifierKey<E> {
+impl<E: Pairing> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifierKey<E> {
     /// prepare `PreparedVerifierKey` from `VerifierKey`
     fn prepare(vk: &VerifierKey<E>) -> Self {
         vk.clone()
@@ -418,9 +333,9 @@ impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifie
 )]
 pub struct Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     /// A multivariate polynomial where each monomial is univariate with random coefficient
     pub blinding_polynomial: P,
@@ -429,9 +344,9 @@ where
 
 impl<E, P> Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     /// Does `self` provide any hiding properties to the corresponding commitment?
     /// `self.is_hiding() == true` only if the underlying polynomial is non-zero.
@@ -449,9 +364,9 @@ where
 
 impl<E, P> PCRandomness for Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     fn empty() -> Self {
         Self {
@@ -474,12 +389,12 @@ where
     }
 }
 
-impl<'a, E: PairingEngine, P: DenseMVPolynomial<E::Fr>> Add<&'a Randomness<E, P>>
+impl<'a, E: Pairing, P: DenseMVPolynomial<E::ScalarField>> Add<&'a Randomness<E, P>>
     for Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     type Output = Self;
 
@@ -490,16 +405,16 @@ where
     }
 }
 
-impl<'a, E, P> Add<(E::Fr, &'a Randomness<E, P>)> for Randomness<E, P>
+impl<'a, E, P> Add<(E::ScalarField, &'a Randomness<E, P>)> for Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     type Output = Self;
 
     #[inline]
-    fn add(mut self, other: (E::Fr, &'a Randomness<E, P>)) -> Self {
+    fn add(mut self, other: (E::ScalarField, &'a Randomness<E, P>)) -> Self {
         self += other;
         self
     }
@@ -507,9 +422,9 @@ where
 
 impl<'a, E, P> AddAssign<&'a Randomness<E, P>> for Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     #[inline]
     fn add_assign(&mut self, other: &'a Self) {
@@ -517,14 +432,14 @@ where
     }
 }
 
-impl<'a, E, P> AddAssign<(E::Fr, &'a Randomness<E, P>)> for Randomness<E, P>
+impl<'a, E, P> AddAssign<(E::ScalarField, &'a Randomness<E, P>)> for Randomness<E, P>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr>,
-    P::Point: Index<usize, Output = E::Fr>,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     #[inline]
-    fn add_assign(&mut self, (f, other): (E::Fr, &'a Randomness<E, P>)) {
+    fn add_assign(&mut self, (f, other): (E::ScalarField, &'a Randomness<E, P>)) {
         self.blinding_polynomial += (f, &other.blinding_polynomial);
     }
 }
@@ -539,21 +454,10 @@ where
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct Proof<E: PairingEngine> {
+pub struct Proof<E: Pairing> {
     /// Commitments to the witness polynomials
     pub w: Vec<E::G1Affine>,
     /// Evaluation of the random polynomial at the point for which
     /// the evaluation proof was produced.
-    pub random_v: Option<E::Fr>,
-}
-
-impl<E: PairingEngine> PCProof for Proof<E> {
-    fn size_in_bytes(&self) -> usize {
-        let hiding_size = if self.random_v.is_some() {
-            E::Fr::zero().serialized_size()
-        } else {
-            0
-        };
-        (self.w.len() * E::G1Affine::zero().serialized_size()) / 2 + hiding_size
-    }
+    pub random_v: Option<E::ScalarField>,
 }
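The bulk of this file is the port to the reworked ark-serialize API: the three serialize/deserialize variants collapse into `serialize_with_mode`/`deserialize_with_mode` parameterized by `Compress` and `Validate`, and structural checks move into a separate `Valid::check`. A sketch of how a caller round-trips one of these keys under the new API; the generic helper is illustrative, not part of the patch:

    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};

    fn roundtrip<T: CanonicalSerialize + CanonicalDeserialize>(value: &T) -> T {
        let mut bytes = Vec::new();
        value
            .serialize_with_mode(&mut bytes, Compress::Yes)
            .expect("writing to an in-memory buffer should not fail");
        // Validate::No skips Valid::check, mirroring the field-wise deserialization
        // above, which defers the check until the whole struct is rebuilt.
        T::deserialize_with_mode(&bytes[..], Compress::Yes, Validate::No)
            .expect("bytes were produced by serialize_with_mode just above")
    }

The convenience wrappers `serialize_compressed`/`deserialize_compressed` and their uncompressed and unchecked variants are defined in terms of these two methods.
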
diff --git a/src/marlin/marlin_pst13_pc/mod.rs b/src/marlin/marlin_pst13_pc/mod.rs
index 20f7096..eaa5eee 100644
--- a/src/marlin/marlin_pst13_pc/mod.rs
+++ b/src/marlin/marlin_pst13_pc/mod.rs
@@ -7,10 +7,8 @@ use crate::{BatchLCProof, Error, Evaluations, QuerySet};
 use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination};
 use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment};
 use crate::{ToString, Vec};
-use ark_ec::{
-    msm::{FixedBase, VariableBaseMSM},
-    AffineCurve, PairingEngine, ProjectiveCurve,
-};
+use ark_ec::AffineRepr;
+use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM};
 use ark_ff::{One, PrimeField, UniformRand, Zero};
 use ark_poly::{multivariate::Term, DenseMVPolynomial};
 use ark_std::rand::RngCore;
@@ -23,7 +21,7 @@ mod combinations;
 use combinations::*;
 
 use crate::challenge::ChallengeGenerator;
-use ark_sponge::CryptographicSponge;
+use ark_crypto_primitives::sponge::CryptographicSponge;
 #[cfg(feature = "parallel")]
 use rayon::prelude::*;
 
@@ -33,13 +31,15 @@ use rayon::prelude::*;
 ///
 /// [pst]: https://eprint.iacr.org/2011/587
 /// [marlin]: https://eprint.iacr.org/2019/104
-pub struct MarlinPST13<E: PairingEngine, P: DenseMVPolynomial<E::Fr>, S: CryptographicSponge> {
+pub struct MarlinPST13<E: Pairing, P: DenseMVPolynomial<E::ScalarField>, S: CryptographicSponge> {
     _engine: PhantomData<E>,
     _poly: PhantomData<P>,
     _sponge: PhantomData<S>,
 }
 
-impl<E: PairingEngine, P: DenseMVPolynomial<E::Fr>, S: CryptographicSponge> MarlinPST13<E, P, S> {
+impl<E: Pairing, P: DenseMVPolynomial<E::ScalarField>, S: CryptographicSponge>
+    MarlinPST13<E, P, S>
+{
     /// Given some point `z`, compute the quotients `w_i(X)` s.t.
     ///
     /// `p(X) - p(z) = (X_1-z_1)*w_1(X) + (X_2-z_2)*w_2(X) + ... + (X_l-z_l)*w_l(X)`
@@ -47,7 +47,7 @@ impl<E: PairingEngine, P: DenseMVPolynomial<E::Fr>, S: CryptographicSponge> Marl
     /// These quotients can always be found with no remainder.
     fn divide_at_point(p: &P, point: &P::Point) -> Vec<P>
     where
-        P::Point: Index<usize, Output = E::Fr>,
+        P::Point: Index<usize, Output = E::ScalarField>,
     {
         let num_vars = p.num_vars();
         if p.is_zero() {
@@ -115,7 +115,7 @@ impl<E: PairingEngine, P: DenseMVPolynomial<E::Fr>, S: CryptographicSponge> Marl
     /// Check that a given polynomial is supported by parameters
     fn check_degrees_and_bounds<'a>(
         supported_degree: usize,
-        p: &'a LabeledPolynomial<E::Fr, P>,
+        p: &'a LabeledPolynomial<E::ScalarField, P>,
     ) -> Result<(), Error>
     where
         P: 'a,
@@ -132,7 +132,7 @@ impl<E: PairingEngine, P: DenseMVPolynomial<E::Fr>, S: CryptographicSponge> Marl
     }
 
     /// Convert polynomial coefficients to `BigInt`
-    fn convert_to_bigints(p: &P) -> Vec<<E::Fr as PrimeField>::BigInt> {
+    fn convert_to_bigints(p: &P) -> Vec<<E::ScalarField as PrimeField>::BigInt> {
         let plain_coeffs = ark_std::cfg_into_iter!(p.terms())
             .map(|(coeff, _)| coeff.into_bigint())
             .collect();
@@ -140,12 +140,12 @@ impl<E: PairingEngine, P: DenseMVPolynomial<E::Fr>, S: CryptographicSponge> Marl
     }
 }
 
-impl<E, P, S> PolynomialCommitment<E::Fr, P, S> for MarlinPST13<E, P, S>
+impl<E, P, S> PolynomialCommitment<E::ScalarField, P, S> for MarlinPST13<E, P, S>
 where
-    E: PairingEngine,
-    P: DenseMVPolynomial<E::Fr> + Sync,
+    E: Pairing,
+    P: DenseMVPolynomial<E::ScalarField> + Sync,
     S: CryptographicSponge,
-    P::Point: Index<usize, Output = E::Fr>,
+    P::Point: Index<usize, Output = E::ScalarField>,
 {
     type UniversalParams = UniversalParams<E, P>;
     type CommitterKey = CommitterKey<E, P>;
@@ -179,12 +179,12 @@ where
         // Trapdoor evaluation points
         let mut betas = Vec::with_capacity(num_vars);
         for _ in 0..num_vars {
-            betas.push(E::Fr::rand(rng));
+            betas.push(E::ScalarField::rand(rng));
         }
         // Generators
-        let g = E::G1Projective::rand(rng);
-        let gamma_g = E::G1Projective::rand(rng);
-        let h = E::G2Projective::rand(rng);
+        let g = E::G1::rand(rng);
+        let gamma_g = E::G1::rand(rng);
+        let h = E::G2::rand(rng);
 
         // A list of all variable numbers of multiplicity `max_degree`
         let variable_set: Vec<_> = (0..num_vars)
@@ -203,7 +203,7 @@ where
                 // trapdoor and generate a `P::Term` object to index it
                 ark_std::cfg_into_iter!(terms)
                     .map(|term| {
-                        let value: E::Fr = term.iter().map(|e| betas[*e]).product();
+                        let value: E::ScalarField = term.iter().map(|e| betas[*e]).product();
                         let term = (0..num_vars)
                             .map(|var| (var, term.iter().filter(|e| **e == var).count()))
                             .collect();
@@ -213,12 +213,12 @@ where
             })
             .unzip();
 
-        let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
         let g_time = start_timer!(|| "Generating powers of G");
         let window_size = FixedBase::get_mul_window_size(max_degree + 1);
         let g_table = FixedBase::get_window_table(scalar_bits, window_size, g);
         let mut powers_of_g =
-            FixedBase::msm::<E::G1Projective>(scalar_bits, window_size, &g_table, &powers_of_beta);
+            FixedBase::msm::<E::G1>(scalar_bits, window_size, &g_table, &powers_of_beta);
         powers_of_g.push(g);
         powers_of_beta_terms.push(P::Term::new(vec![]));
         end_timer!(g_time);
@@ -234,12 +234,12 @@ where
             .enumerate()
             .for_each(|(i, v)| {
                 let mut powers_of_beta = Vec::with_capacity(max_degree);
-                let mut cur = E::Fr::one();
+                let mut cur = E::ScalarField::one();
                 for _ in 0..=max_degree {
                     cur *= &betas[i];
                     powers_of_beta.push(cur);
                 }
-                *v = FixedBase::msm::<E::G1Projective>(
+                *v = FixedBase::msm::<E::G1>(
                     scalar_bits,
                     window_size,
                     &gamma_g_table,
@@ -248,11 +248,11 @@ where
             });
         end_timer!(gamma_g_time);
 
-        let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g);
+        let powers_of_g = E::G1::normalize_batch(&powers_of_g);
         let gamma_g = gamma_g.into_affine();
         let powers_of_gamma_g = powers_of_gamma_g
             .into_iter()
-            .map(|v| E::G1Projective::batch_normalization_into_affine(&v))
+            .map(|v| E::G1::normalize_batch(&v))
             .collect();
         let beta_h: Vec<_> = betas.iter().map(|b| h.mul(b).into_affine()).collect();
         let h = h.into_affine();
@@ -340,7 +340,7 @@ where
     /// Outputs commitments to `polynomials`.
     fn commit<'a>(
         ck: &Self::CommitterKey,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         rng: Option<&mut dyn RngCore>,
     ) -> Result<
         (
@@ -380,8 +380,7 @@ where
             end_timer!(to_bigint_time);
 
             let msm_time = start_timer!(|| "MSM to compute commitment to plaintext poly");
-            let mut commitment =
-                <E::G1Projective as VariableBaseMSM>::msm_bigint(&powers_of_g, &plain_ints);
+            let mut commitment = <E::G1 as VariableBaseMSM>::msm_bigint(&powers_of_g, &plain_ints);
             end_timer!(msm_time);
 
             // Sample random polynomial
@@ -417,12 +416,12 @@ where
 
             let msm_time = start_timer!(|| "MSM to compute commitment to random poly");
             let random_commitment =
-                <E::G1Projective as VariableBaseMSM>::msm_bigint(&powers_of_gamma_g, &random_ints)
+                <E::G1 as VariableBaseMSM>::msm_bigint(&powers_of_gamma_g, &random_ints)
                     .into_affine();
             end_timer!(msm_time);
 
             // Mask commitment with random poly
-            commitment.add_assign_mixed(&random_commitment);
+            commitment += &random_commitment;
 
             let comm = Self::Commitment {
                 comm: kzg10::Commitment(commitment.into()),
@@ -440,10 +439,10 @@ where
     /// On input a polynomial `p` and a point `point`, outputs a proof for the same.
     fn open<'a>(
         ck: &Self::CommitterKey,
-        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         _commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         point: &P::Point,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         _rng: Option<&mut dyn RngCore>,
     ) -> Result<Self::Proof, Self::Error>
@@ -486,7 +485,7 @@ where
                 // Convert coefficients to BigInt
                 let witness_ints = Self::convert_to_bigints(&w);
                 // Compute MSM
-                <E::G1Projective as VariableBaseMSM>::msm_bigint(&powers_of_g, &witness_ints)
+                <E::G1 as VariableBaseMSM>::msm_bigint(&powers_of_g, &witness_ints)
             })
             .collect::<Vec<_>>();
         end_timer!(witness_comm_time);
@@ -516,7 +515,7 @@ where
                     // Convert coefficients to BigInt
                     let hiding_witness_ints = Self::convert_to_bigints(hiding_witness);
                     // Compute MSM and add result to witness
-                    *witness += &<E::G1Projective as VariableBaseMSM>::msm_bigint(
+                    *witness += &<E::G1 as VariableBaseMSM>::msm_bigint(
                         &powers_of_gamma_g,
                         &hiding_witness_ints,
                     );
@@ -539,9 +538,9 @@ where
         vk: &Self::VerifierKey,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         point: &'a P::Point,
-        values: impl IntoIterator<Item = E::Fr>,
+        values: impl IntoIterator<Item = E::ScalarField>,
         proof: &Self::Proof,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         _rng: Option<&mut dyn RngCore>,
     ) -> Result<bool, Self::Error>
     where
@@ -557,22 +556,23 @@ where
                 None,
             )?;
         // Compute both sides of the pairing equation
-        let mut inner = combined_comm.into().into_projective() - &vk.g.mul(combined_value);
+        let mut inner = combined_comm.into().into_group() - &vk.g.mul(combined_value);
         if let Some(random_v) = proof.random_v {
             inner -= &vk.gamma_g.mul(random_v);
         }
         let lhs = E::pairing(inner, vk.h);
 
         // Create a list of elements corresponding to each pairing in the product on the rhs
-        let rhs_product: Vec<(E::G1Prepared, E::G2Prepared)> = ark_std::cfg_iter!(proof.w)
-            .enumerate()
-            .map(|(j, w_j)| {
-                let beta_minus_z: E::G2Affine =
-                    (vk.beta_h[j].into_projective() - &vk.h.mul(point[j])).into();
-                ((*w_j).into(), beta_minus_z.into())
-            })
-            .collect();
-        let rhs = E::product_of_pairings(&rhs_product);
+        let (rhs_product_g1, rhs_product_g2): (Vec<E::G1Prepared>, Vec<E::G2Prepared>) =
+            ark_std::cfg_iter!(proof.w)
+                .enumerate()
+                .map(|(j, w_j)| {
+                    let beta_minus_z: E::G2Affine =
+                        (vk.beta_h[j].into_group() - &vk.h.mul(point[j])).into();
+                    ((*w_j).into(), beta_minus_z.into())
+                })
+                .unzip();
+        let rhs = E::multi_pairing(rhs_product_g1, rhs_product_g2);
         end_timer!(check_time);
 
         Ok(lhs == rhs)
@@ -582,9 +582,9 @@ where
         vk: &Self::VerifierKey,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        values: &Evaluations<P::Point, E::Fr>,
+        values: &Evaluations<P::Point, E::ScalarField>,
         proof: &Self::BatchProof,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Self::Error>
     where
@@ -600,16 +600,16 @@ where
             )?;
         let check_time =
             start_timer!(|| format!("Checking {} evaluation proofs", combined_comms.len()));
-        let g = vk.g.into_projective();
-        let gamma_g = vk.gamma_g.into_projective();
-        let mut total_c = <E::G1Projective>::zero();
-        let mut total_w = vec![<E::G1Projective>::zero(); vk.num_vars];
+        let g = vk.g.into_group();
+        let gamma_g = vk.gamma_g.into_group();
+        let mut total_c = <E::G1>::zero();
+        let mut total_w = vec![<E::G1>::zero(); vk.num_vars];
         let combination_time = start_timer!(|| "Combining commitments and proofs");
-        let mut randomizer = E::Fr::one();
+        let mut randomizer = E::ScalarField::one();
         // Instead of multiplying g and gamma_g in each turn, we simply accumulate
         // their coefficients and perform a final multiplication at the end.
-        let mut g_multiplier = E::Fr::zero();
-        let mut gamma_g_multiplier = E::Fr::zero();
+        let mut g_multiplier = E::ScalarField::zero();
+        let mut gamma_g_multiplier = E::ScalarField::zero();
         for (((c, z), v), proof) in combined_comms
             .iter()
             .zip(combined_queries)
@@ -617,11 +617,11 @@ where
             .zip(proof)
         {
             let w = &proof.w;
-            let mut temp: E::G1Projective = ark_std::cfg_iter!(w)
+            let mut temp: E::G1 = ark_std::cfg_iter!(w)
                 .enumerate()
                 .map(|(j, w_j)| w_j.mul(z[j]))
                 .sum();
-            temp.add_assign_mixed(&c.0);
+            temp += &c.0;
             let c = temp;
             g_multiplier += &(randomizer * &v);
             if let Some(random_v) = proof.random_v {
@@ -640,15 +640,17 @@ where
         end_timer!(combination_time);
 
         let to_affine_time = start_timer!(|| "Converting results to affine for pairing");
-        let mut pairings = Vec::new();
-        total_w.into_iter().enumerate().for_each(|(j, w_j)| {
-            pairings.push(((-w_j).into_affine().into(), vk.prepared_beta_h[j].clone()))
-        });
-        pairings.push((total_c.into_affine().into(), vk.prepared_h.clone()));
+        let (mut p1, mut p2): (Vec<E::G1Prepared>, Vec<E::G2Prepared>) = total_w
+            .into_iter()
+            .enumerate()
+            .map(|(j, w_j)| ((-w_j).into_affine().into(), vk.prepared_beta_h[j].clone()))
+            .unzip();
+        p1.push(total_c.into_affine().into());
+        p2.push(vk.prepared_h.clone());
         end_timer!(to_affine_time);
 
         let pairing_time = start_timer!(|| "Performing product of pairings");
-        let result = E::product_of_pairings(&pairings).is_one();
+        let result = E::multi_pairing(p1, p2).0.is_one();
         end_timer!(pairing_time);
         end_timer!(check_time);
         Ok(result)
@@ -656,14 +658,14 @@ where
 
     fn open_combinations<'a>(
         ck: &Self::CommitterKey,
-        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         rng: Option<&mut dyn RngCore>,
-    ) -> Result<BatchLCProof<E::Fr, Self::BatchProof>, Self::Error>
+    ) -> Result<BatchLCProof<E::ScalarField, Self::BatchProof>, Self::Error>
     where
         P: 'a,
         Self::Randomness: 'a,
@@ -685,12 +687,12 @@ where
     /// committed in `labeled_commitments`.
     fn check_combinations<'a, R: RngCore>(
         vk: &Self::VerifierKey,
-        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
+        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         eqn_query_set: &QuerySet<P::Point>,
-        eqn_evaluations: &Evaluations<P::Point, E::Fr>,
-        proof: &BatchLCProof<E::Fr, Self::BatchProof>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        eqn_evaluations: &Evaluations<P::Point, E::ScalarField>,
+        proof: &BatchLCProof<E::ScalarField, Self::BatchProof>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Self::Error>
     where
@@ -715,39 +717,43 @@ mod tests {
     use super::MarlinPST13;
     use ark_bls12_377::Bls12_377;
     use ark_bls12_381::Bls12_381;
-    use ark_ec::PairingEngine;
+    use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
+    use ark_ec::pairing::Pairing;
     use ark_ff::UniformRand;
     use ark_poly::{
         multivariate::{SparsePolynomial as SparsePoly, SparseTerm},
         DenseMVPolynomial,
     };
-    use ark_sponge::poseidon::PoseidonSponge;
+    use ark_std::vec::Vec;
     use rand_chacha::ChaCha20Rng;
 
-    type MVPoly_381 = SparsePoly<<Bls12_381 as PairingEngine>::Fr, SparseTerm>;
-    type MVPoly_377 = SparsePoly<<Bls12_377 as PairingEngine>::Fr, SparseTerm>;
+    type MVPoly_381 = SparsePoly<<Bls12_381 as Pairing>::ScalarField, SparseTerm>;
+    type MVPoly_377 = SparsePoly<<Bls12_377 as Pairing>::ScalarField, SparseTerm>;
 
     type PC<E, P, S> = MarlinPST13<E, P, S>;
 
-    type Sponge_bls12_381 = PoseidonSponge<<Bls12_381 as PairingEngine>::Fr>;
-    type Sponge_Bls12_377 = PoseidonSponge<<Bls12_377 as PairingEngine>::Fr>;
+    type Sponge_bls12_381 = PoseidonSponge<<Bls12_381 as Pairing>::ScalarField>;
+    type Sponge_Bls12_377 = PoseidonSponge<<Bls12_377 as Pairing>::ScalarField>;
 
     type PC_Bls12_381 = PC<Bls12_381, MVPoly_381, Sponge_bls12_381>;
     type PC_Bls12_377 = PC<Bls12_377, MVPoly_377, Sponge_Bls12_377>;
 
-    fn rand_poly<E: PairingEngine>(
+    fn rand_poly<E: Pairing>(
         degree: usize,
         num_vars: Option<usize>,
         rng: &mut ChaCha20Rng,
-    ) -> SparsePoly<E::Fr, SparseTerm> {
-        SparsePoly::<E::Fr, SparseTerm>::rand(degree, num_vars.unwrap(), rng)
+    ) -> SparsePoly<E::ScalarField, SparseTerm> {
+        SparsePoly::<E::ScalarField, SparseTerm>::rand(degree, num_vars.unwrap(), rng)
     }
 
-    fn rand_point<E: PairingEngine>(num_vars: Option<usize>, rng: &mut ChaCha20Rng) -> Vec<E::Fr> {
+    fn rand_point<E: Pairing>(
+        num_vars: Option<usize>,
+        rng: &mut ChaCha20Rng,
+    ) -> Vec<E::ScalarField> {
         let num_vars = num_vars.unwrap();
         let mut point = Vec::with_capacity(num_vars);
         for _ in 0..num_vars {
-            point.push(E::Fr::rand(rng));
+            point.push(E::ScalarField::rand(rng));
         }
         point
     }
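
The `check` and `batch_check` hunks above replace `E::product_of_pairings`, which took a slice of `(G1Prepared, G2Prepared)` pairs, with `E::multi_pairing`, which takes the G1 and G2 sides as two parallel iterators and returns a `PairingOutput` whose inner target-field element is compared against one. A minimal sketch of that pattern, assuming `Bls12_381` and a toy relation e(aG, bH) * e(-abG, H) = 1 purely for illustration (none of the names below come from this crate):

    use ark_bls12_381::{Bls12_381, Fr, G1Projective, G2Projective};
    use ark_ec::{pairing::Pairing, CurveGroup, Group};
    use ark_ff::One;
    use ark_std::UniformRand;

    fn main() {
        let mut rng = ark_std::test_rng();
        let (a, b) = (Fr::rand(&mut rng), Fr::rand(&mut rng));
        let g = G1Projective::generator();
        let h = G2Projective::generator();

        // e(a*G, b*H) * e(-(a*b)*G, H) should be the identity in G_T.
        let g1s = [(g * a).into_affine(), (-(g * (a * b))).into_affine()];
        let g2s = [(h * b).into_affine(), h.into_affine()];

        // 0.3: E::product_of_pairings(&[(p1, q1), (p2, q2)]).is_one()
        // 0.4: the G1 and G2 sides are passed as two parallel iterators.
        let out = Bls12_381::multi_pairing(g1s, g2s);
        assert!(out.0.is_one());
    }
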
diff --git a/src/marlin/mod.rs b/src/marlin/mod.rs
index eff8c4c..57446c0 100644
--- a/src/marlin/mod.rs
+++ b/src/marlin/mod.rs
@@ -4,9 +4,11 @@ use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec};
 use crate::{BatchLCProof, LabeledPolynomial, LinearCombination};
 use crate::{Evaluations, LabeledCommitment, QuerySet};
 use crate::{PCRandomness, Polynomial, PolynomialCommitment};
-use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve};
+use ark_crypto_primitives::sponge::CryptographicSponge;
+use ark_ec::pairing::Pairing;
+use ark_ec::AffineRepr;
+use ark_ec::CurveGroup;
 use ark_ff::{One, Zero};
-use ark_sponge::CryptographicSponge;
 use ark_std::{convert::TryInto, hash::Hash, ops::AddAssign, ops::Mul};
 
 /// Polynomial commitment scheme from [[KZG10]][kzg] that enforces
@@ -28,10 +30,10 @@ pub mod marlin_pst13_pc;
 /// Common functionalities between `marlin_pc` and `marlin_pst13_pc`
 struct Marlin<E, S, P, PC>
 where
-    E: PairingEngine,
+    E: Pairing,
     S: CryptographicSponge,
-    P: Polynomial<E::Fr>,
-    PC: PolynomialCommitment<E::Fr, P, S>,
+    P: Polynomial<E::ScalarField>,
+    PC: PolynomialCommitment<E::ScalarField, P, S>,
 {
     _engine: core::marker::PhantomData<E>,
     _sponge: core::marker::PhantomData<S>,
@@ -41,20 +43,20 @@ where
 
 impl<E, S, P, PC> Marlin<E, S, P, PC>
 where
-    E: PairingEngine,
+    E: Pairing,
     S: CryptographicSponge,
-    P: Polynomial<E::Fr>,
-    PC: PolynomialCommitment<E::Fr, P, S>,
+    P: Polynomial<E::ScalarField>,
+    PC: PolynomialCommitment<E::ScalarField, P, S>,
 {
     /// MSM for `commitments` and `coeffs`
     fn combine_commitments<'a>(
-        coeffs_and_comms: impl IntoIterator<Item = (E::Fr, &'a marlin_pc::Commitment<E>)>,
-    ) -> (E::G1Projective, Option<E::G1Projective>) {
-        let mut combined_comm = E::G1Projective::zero();
+        coeffs_and_comms: impl IntoIterator<Item = (E::ScalarField, &'a marlin_pc::Commitment<E>)>,
+    ) -> (E::G1, Option<E::G1>) {
+        let mut combined_comm = E::G1::zero();
         let mut combined_shifted_comm = None;
         for (coeff, comm) in coeffs_and_comms {
             if coeff.is_one() {
-                combined_comm.add_assign_mixed(&comm.comm.0);
+                combined_comm.add_assign(&comm.comm.0);
             } else {
                 combined_comm += &comm.comm.0.mul(coeff);
             }
@@ -69,7 +71,7 @@ where
 
     /// Normalize a list of commitments
     fn normalize_commitments<'a>(
-        commitments: Vec<(E::G1Projective, Option<E::G1Projective>)>,
+        commitments: Vec<(E::G1, Option<E::G1>)>,
     ) -> Vec<marlin_pc::Commitment<E>> {
         let mut comms = Vec::with_capacity(commitments.len());
         let mut s_comms = Vec::with_capacity(commitments.len());
@@ -80,12 +82,12 @@ where
                 s_comms.push(c);
                 s_flags.push(true);
             } else {
-                s_comms.push(E::G1Projective::zero());
+                s_comms.push(E::G1::zero());
                 s_flags.push(false);
             }
         }
-        let comms = E::G1Projective::batch_normalization_into_affine(&comms);
-        let s_comms = E::G1Projective::batch_normalization_into_affine(&mut s_comms);
+        let comms = E::G1::normalize_batch(&comms);
+        let s_comms = E::G1::normalize_batch(&mut s_comms);
         comms
             .into_iter()
             .zip(s_comms)
@@ -107,13 +109,13 @@ where
     /// Accumulate `commitments` and `values` according to the challenges produced by `challenge_gen`.
     fn accumulate_commitments_and_values<'a>(
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<marlin_pc::Commitment<E>>>,
-        values: impl IntoIterator<Item = E::Fr>,
-        challenge_gen: &mut ChallengeGenerator<E::Fr, S>,
+        values: impl IntoIterator<Item = E::ScalarField>,
+        challenge_gen: &mut ChallengeGenerator<E::ScalarField, S>,
         vk: Option<&marlin_pc::VerifierKey<E>>,
-    ) -> Result<(E::G1Projective, E::Fr), Error> {
+    ) -> Result<(E::G1, E::ScalarField), Error> {
         let acc_time = start_timer!(|| "Accumulating commitments and values");
-        let mut combined_comm = E::G1Projective::zero();
-        let mut combined_value = E::Fr::zero();
+        let mut combined_comm = E::G1::zero();
+        let mut combined_value = E::ScalarField::zero();
         for (labeled_commitment, value) in commitments.into_iter().zip(values) {
             let degree_bound = labeled_commitment.degree_bound();
             let commitment = labeled_commitment.commitment();
@@ -127,12 +129,7 @@ where
             if let Some(degree_bound) = degree_bound {
                 let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE);
 
-                let shifted_comm = commitment
-                    .shifted_comm
-                    .as_ref()
-                    .unwrap()
-                    .0
-                    .into_projective();
+                let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group();
 
                 let shift_power = vk
                     .unwrap()
@@ -154,10 +151,10 @@ where
     fn combine_and_normalize<'a, D: Clone + Ord + Sync>(
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<marlin_pc::Commitment<E>>>,
         query_set: &QuerySet<D>,
-        evaluations: &Evaluations<D, E::Fr>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        evaluations: &Evaluations<D, E::ScalarField>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         vk: Option<&marlin_pc::VerifierKey<E>>,
-    ) -> Result<(Vec<kzg10::Commitment<E>>, Vec<D>, Vec<E::Fr>), Error>
+    ) -> Result<(Vec<kzg10::Commitment<E>>, Vec<D>, Vec<E::ScalarField>), Error>
     where
         marlin_pc::Commitment<E>: 'a,
     {
@@ -212,8 +209,8 @@ where
             combined_evals.push(v);
         }
         let norm_time = start_timer!(|| "Normalizing combined commitments");
-        E::G1Projective::batch_normalization(&mut combined_comms);
-        let combined_comms = combined_comms
+        let combined_comms_affine = E::G1::normalize_batch(&combined_comms);
+        let combined_comms = combined_comms_affine
             .into_iter()
             .map(|c| kzg10::Commitment(c.into()))
             .collect::<Vec<_>>();
@@ -226,26 +223,26 @@ where
     /// the combinations at the points in the query set.
     fn open_combinations<'a, D>(
         ck: &PC::CommitterKey,
-        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<PC::Commitment>>,
         query_set: &QuerySet<D>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a PC::Randomness>,
         rng: Option<&mut dyn RngCore>,
-    ) -> Result<BatchLCProof<E::Fr, PC::BatchProof>, Error>
+    ) -> Result<BatchLCProof<E::ScalarField, PC::BatchProof>, Error>
     where
-        P: 'a + Polynomial<E::Fr, Point = D>,
+        P: 'a + Polynomial<E::ScalarField, Point = D>,
         D: Debug + Clone + Hash + Ord + Sync,
         PC: PolynomialCommitment<
-            E::Fr,
+            E::ScalarField,
             P,
             S,
             Commitment = marlin_pc::Commitment<E>,
             PreparedCommitment = marlin_pc::PreparedCommitment<E>,
             Error = Error,
         >,
-        PC::Randomness: 'a + AddAssign<(E::Fr, &'a PC::Randomness)>,
+        PC::Randomness: 'a + AddAssign<(E::ScalarField, &'a PC::Randomness)>,
         PC::Commitment: 'a,
     {
         let label_map = polynomials
@@ -322,20 +319,20 @@ where
 
     fn check_combinations<'a, R, D>(
         vk: &PC::VerifierKey,
-        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
+        lc_s: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<PC::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        evaluations: &Evaluations<P::Point, E::Fr>,
-        proof: &BatchLCProof<E::Fr, PC::BatchProof>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        evaluations: &Evaluations<P::Point, E::ScalarField>,
+        proof: &BatchLCProof<E::ScalarField, PC::BatchProof>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Error>
     where
         R: RngCore,
-        P: Polynomial<E::Fr, Point = D>,
+        P: Polynomial<E::ScalarField, Point = D>,
         D: Debug + Clone + Hash + Ord + Sync,
         PC: PolynomialCommitment<
-            E::Fr,
+            E::ScalarField,
             P,
             S,
             Commitment = marlin_pc::Commitment<E>,
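
`Marlin::normalize_commitments` and `combine_and_normalize` above switch from `ProjectiveCurve::batch_normalization_into_affine` to `CurveGroup::normalize_batch`, and from `into_projective` to `into_group`. A small sketch of the renamed calls, assuming `Bls12_381` G1 and throwaway scalars (the example is illustrative, not code from this crate):

    use ark_bls12_381::{Fr, G1Affine, G1Projective};
    use ark_ec::{AffineRepr, CurveGroup};
    use ark_std::UniformRand;

    fn main() {
        let mut rng = ark_std::test_rng();
        let projective: Vec<G1Projective> = (0..4)
            .map(|_| G1Affine::generator() * Fr::rand(&mut rng))
            .collect();

        // 0.3: E::G1Projective::batch_normalization_into_affine(&projective)
        // 0.4: one shared inversion for the whole batch via `normalize_batch`.
        let affine: Vec<G1Affine> = G1Projective::normalize_batch(&projective);

        // 0.3: `into_projective`; 0.4: `into_group`.
        let back: Vec<G1Projective> = affine.iter().map(|p| p.into_group()).collect();
        assert_eq!(projective, back);
    }
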
diff --git a/src/multilinear_pc/data_structures.rs b/src/multilinear_pc/data_structures.rs
index e52ef66..6920ece 100644
--- a/src/multilinear_pc/data_structures.rs
+++ b/src/multilinear_pc/data_structures.rs
@@ -1,16 +1,16 @@
-use ark_ec::PairingEngine;
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write};
+use ark_ec::pairing::Pairing;
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::vec::Vec;
 #[allow(type_alias_bounds)]
 /// Evaluations over {0,1}^n for G1
-pub type EvaluationHyperCubeOnG1<E: PairingEngine> = Vec<E::G1Affine>;
+pub type EvaluationHyperCubeOnG1<E: Pairing> = Vec<E::G1Affine>;
 #[allow(type_alias_bounds)]
 /// Evaluations over {0,1}^n for G2
-pub type EvaluationHyperCubeOnG2<E: PairingEngine> = Vec<E::G2Affine>;
+pub type EvaluationHyperCubeOnG2<E: Pairing> = Vec<E::G2Affine>;
 
 /// Public Parameter used by prover
 #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
-pub struct UniversalParams<E: PairingEngine> {
+pub struct UniversalParams<E: Pairing> {
     /// number of variables
     pub num_vars: usize,
     /// `pp_{num_vars}`, `pp_{num_vars - 1}`, `pp_{num_vars - 2}`, ..., defined by XZZPD19
@@ -27,7 +27,7 @@ pub struct UniversalParams<E: PairingEngine> {
 
 /// Public Parameter used by prover
 #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
-pub struct CommitterKey<E: PairingEngine> {
+pub struct CommitterKey<E: Pairing> {
     /// number of variables
     pub nv: usize,
     /// pp_k defined by libra
@@ -42,7 +42,7 @@ pub struct CommitterKey<E: PairingEngine> {
 
 /// Public Parameter used by prover
 #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
-pub struct VerifierKey<E: PairingEngine> {
+pub struct VerifierKey<E: Pairing> {
     /// number of variables
     pub nv: usize,
     /// generator of G1
@@ -55,7 +55,7 @@ pub struct VerifierKey<E: PairingEngine> {
 
 #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
 /// commitment
-pub struct Commitment<E: PairingEngine> {
+pub struct Commitment<E: Pairing> {
     /// number of variables
     pub nv: usize,
     /// product of g as described by the vRAM paper
@@ -64,7 +64,7 @@ pub struct Commitment<E: PairingEngine> {
 
 #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
 /// proof of opening
-pub struct Proof<E: PairingEngine> {
+pub struct Proof<E: Pairing> {
     /// Evaluation of quotients
     pub proofs: Vec<E::G2Affine>,
 }
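
These data structures keep their derived `CanonicalSerialize`/`CanonicalDeserialize` impls; under `ark-serialize` 0.4 the definition site no longer needs `Read`, `Write`, or `SerializationError` in scope, and callers pick a compression mode explicitly. A hedged round-trip sketch with a stand-in struct (`ToyCommitment` mirrors the shape of `Commitment<E>` above but is illustrative only):

    use ark_bls12_381::{Bls12_381, G1Affine};
    use ark_ec::{pairing::Pairing, AffineRepr};
    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

    #[derive(CanonicalSerialize, CanonicalDeserialize)]
    struct ToyCommitment<E: Pairing> {
        nv: usize,
        g_product: E::G1Affine,
    }

    fn main() {
        let c = ToyCommitment::<Bls12_381> {
            nv: 3,
            g_product: G1Affine::generator(),
        };

        // Compressed round-trip; the uncompressed and `*_with_mode` variants
        // follow the same shape.
        let mut bytes = Vec::new();
        c.serialize_compressed(&mut bytes).unwrap();
        let back = ToyCommitment::<Bls12_381>::deserialize_compressed(&bytes[..]).unwrap();
        assert_eq!(c.nv, back.nv);
        assert_eq!(c.g_product, back.g_product);
    }
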
diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs
index 6d00163..bd5d3e5 100644
--- a/src/multilinear_pc/mod.rs
+++ b/src/multilinear_pc/mod.rs
@@ -1,8 +1,9 @@
 use crate::multilinear_pc::data_structures::{
     Commitment, CommitterKey, Proof, UniversalParams, VerifierKey,
 };
-use ark_ec::msm::{FixedBase, VariableBaseMSM};
-use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve};
+use ark_ec::AffineRepr;
+use ark_ec::{pairing::Pairing, CurveGroup};
+use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM};
 use ark_ff::{Field, PrimeField};
 use ark_ff::{One, Zero};
 use ark_poly::{DenseMultilinearExtension, MultilinearExtension};
@@ -18,24 +19,24 @@ use ark_std::UniformRand;
 pub mod data_structures;
 
 /// Polynomial Commitment Scheme on multilinear extensions.
-pub struct MultilinearPC<E: PairingEngine> {
+pub struct MultilinearPC<E: Pairing> {
     _engine: PhantomData<E>,
 }
 
-impl<E: PairingEngine> MultilinearPC<E> {
+impl<E: Pairing> MultilinearPC<E> {
     /// setup
     pub fn setup<R: RngCore>(num_vars: usize, rng: &mut R) -> UniversalParams<E> {
         assert!(num_vars > 0, "constant polynomial not supported");
-        let g: E::G1Projective = E::G1Projective::rand(rng);
-        let h: E::G2Projective = E::G2Projective::rand(rng);
+        let g: E::G1 = E::G1::rand(rng);
+        let h: E::G2 = E::G2::rand(rng);
         let g = g.into_affine();
         let h = h.into_affine();
         let mut powers_of_g = Vec::new();
         let mut powers_of_h = Vec::new();
-        let t: Vec<_> = (0..num_vars).map(|_| E::Fr::rand(rng)).collect();
-        let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let t: Vec<_> = (0..num_vars).map(|_| E::ScalarField::rand(rng)).collect();
+        let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
 
-        let mut eq: LinkedList<DenseMultilinearExtension<E::Fr>> =
+        let mut eq: LinkedList<DenseMultilinearExtension<E::ScalarField>> =
             LinkedList::from_iter(eq_extension(&t).into_iter());
         let mut eq_arr = LinkedList::new();
         let mut base = eq.pop_back().unwrap().evaluations;
@@ -61,16 +62,16 @@ impl<E: PairingEngine> MultilinearPC<E> {
             total_scalars += 1 << (num_vars - i);
         }
         let window_size = FixedBase::get_mul_window_size(total_scalars);
-        let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_projective());
-        let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_projective());
+        let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group());
+        let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_group());
 
-        let pp_g = E::G1Projective::batch_normalization_into_affine(&FixedBase::msm(
+        let pp_g = E::G1::normalize_batch(&FixedBase::msm(
             scalar_bits,
             window_size,
             &g_table,
             &pp_powers,
         ));
-        let pp_h = E::G2Projective::batch_normalization_into_affine(&FixedBase::msm(
+        let pp_h = E::G2::normalize_batch(&FixedBase::msm(
             scalar_bits,
             window_size,
             &h_table,
@@ -90,14 +91,8 @@ impl<E: PairingEngine> MultilinearPC<E> {
         // let vp_generation_timer = start_timer!(|| "VP generation");
         let g_mask = {
             let window_size = FixedBase::get_mul_window_size(num_vars);
-            let g_table =
-                FixedBase::get_window_table(scalar_bits, window_size, g.into_projective());
-            E::G1Projective::batch_normalization_into_affine(&FixedBase::msm(
-                scalar_bits,
-                window_size,
-                &g_table,
-                &t,
-            ))
+            let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group());
+            E::G1::normalize_batch(&FixedBase::msm(scalar_bits, window_size, &g_table, &t))
         };
         // end_timer!(vp_generation_timer);
 
@@ -139,7 +134,7 @@ impl<E: PairingEngine> MultilinearPC<E> {
     /// commit
     pub fn commit(
         ck: &CommitterKey<E>,
-        polynomial: &impl MultilinearExtension<E::Fr>,
+        polynomial: &impl MultilinearExtension<E::ScalarField>,
     ) -> Commitment<E> {
         let nv = polynomial.num_vars();
         let scalars: Vec<_> = polynomial
@@ -147,24 +142,22 @@ impl<E: PairingEngine> MultilinearPC<E> {
             .into_iter()
             .map(|x| x.into_bigint())
             .collect();
-        let g_product = <E::G1Projective as VariableBaseMSM>::msm_bigint(
-            &ck.powers_of_g[0],
-            scalars.as_slice(),
-        )
-        .into_affine();
+        let g_product =
+            <E::G1 as VariableBaseMSM>::msm_bigint(&ck.powers_of_g[0], scalars.as_slice())
+                .into_affine();
         Commitment { nv, g_product }
     }
 
     /// On input a polynomial `p` and a point `point`, outputs a proof for the same.
     pub fn open(
         ck: &CommitterKey<E>,
-        polynomial: &impl MultilinearExtension<E::Fr>,
-        point: &[E::Fr],
+        polynomial: &impl MultilinearExtension<E::ScalarField>,
+        point: &[E::ScalarField],
     ) -> Proof<E> {
         assert_eq!(polynomial.num_vars(), ck.nv, "Invalid size of polynomial");
         let nv = polynomial.num_vars();
-        let mut r: Vec<Vec<E::Fr>> = (0..nv + 1).map(|_| Vec::new()).collect();
-        let mut q: Vec<Vec<E::Fr>> = (0..nv + 1).map(|_| Vec::new()).collect();
+        let mut r: Vec<Vec<E::ScalarField>> = (0..nv + 1).map(|_| Vec::new()).collect();
+        let mut q: Vec<Vec<E::ScalarField>> = (0..nv + 1).map(|_| Vec::new()).collect();
 
         r[nv] = polynomial.to_evaluations();
 
@@ -172,11 +165,15 @@ impl<E: PairingEngine> MultilinearPC<E> {
         for i in 0..nv {
             let k = nv - i;
             let point_at_k = point[i];
-            q[k] = (0..(1 << (k - 1))).map(|_| E::Fr::zero()).collect();
-            r[k - 1] = (0..(1 << (k - 1))).map(|_| E::Fr::zero()).collect();
+            q[k] = (0..(1 << (k - 1)))
+                .map(|_| E::ScalarField::zero())
+                .collect();
+            r[k - 1] = (0..(1 << (k - 1)))
+                .map(|_| E::ScalarField::zero())
+                .collect();
             for b in 0..(1 << (k - 1)) {
                 q[k][b] = r[k][(b << 1) + 1] - &r[k][b << 1];
-                r[k - 1][b] = r[k][b << 1] * &(E::Fr::one() - &point_at_k)
+                r[k - 1][b] = r[k][b << 1] * &(E::ScalarField::one() - &point_at_k)
                     + &(r[k][(b << 1) + 1] * &point_at_k);
             }
             let scalars: Vec<_> = (0..(1 << k))
@@ -184,8 +181,7 @@ impl<E: PairingEngine> MultilinearPC<E> {
                 .collect();
 
             let pi_h =
-                <E::G2Projective as VariableBaseMSM>::msm_bigint(&ck.powers_of_h[i], &scalars)
-                    .into_affine(); // no need to move outside and partition
+                <E::G2 as VariableBaseMSM>::msm_bigint(&ck.powers_of_h[i], &scalars).into_affine(); // no need to move outside and partition
             proofs.push(pi_h);
         }
 
@@ -197,26 +193,22 @@ impl<E: PairingEngine> MultilinearPC<E> {
     pub fn check<'a>(
         vk: &VerifierKey<E>,
         commitment: &Commitment<E>,
-        point: &[E::Fr],
-        value: E::Fr,
+        point: &[E::ScalarField],
+        value: E::ScalarField,
         proof: &Proof<E>,
     ) -> bool {
-        let left = E::pairing(
-            commitment.g_product.into_projective() - &vk.g.mul(value),
-            vk.h,
-        );
+        let left = E::pairing(commitment.g_product.into_group() - &vk.g.mul(value), vk.h);
 
-        let scalar_size = E::Fr::MODULUS_BIT_SIZE as usize;
+        let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize;
         let window_size = FixedBase::get_mul_window_size(vk.nv);
 
-        let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_projective());
-        let g_mul: Vec<E::G1Projective> = FixedBase::msm(scalar_size, window_size, &g_table, point);
+        let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_group());
+        let g_mul: Vec<E::G1> = FixedBase::msm(scalar_size, window_size, &g_table, point);
 
         let pairing_lefts: Vec<_> = (0..vk.nv)
-            .map(|i| vk.g_mask_random[i].into_projective() - &g_mul[i])
+            .map(|i| vk.g_mask_random[i].into_group() - &g_mul[i])
             .collect();
-        let pairing_lefts: Vec<E::G1Affine> =
-            E::G1Projective::batch_normalization_into_affine(&pairing_lefts);
+        let pairing_lefts: Vec<E::G1Affine> = E::G1::normalize_batch(&pairing_lefts);
         let pairing_lefts: Vec<E::G1Prepared> = pairing_lefts
             .into_iter()
             .map(|x| E::G1Prepared::from(x))
@@ -228,11 +220,7 @@ impl<E: PairingEngine> MultilinearPC<E> {
             .map(|x| E::G2Prepared::from(*x))
             .collect();
 
-        let pairings: Vec<_> = pairing_lefts
-            .into_iter()
-            .zip(pairing_rights.into_iter())
-            .collect();
-        let right = E::product_of_pairings(pairings.iter());
+        let right = E::multi_pairing(pairing_lefts, pairing_rights);
         left == right
     }
 }
@@ -272,16 +260,17 @@ fn eq_extension<F: Field>(t: &[F]) -> Vec<DenseMultilinearExtension<F>> {
 
 #[cfg(test)]
 mod tests {
+    use crate::ark_std::UniformRand;
     use crate::multilinear_pc::data_structures::UniversalParams;
     use crate::multilinear_pc::MultilinearPC;
     use ark_bls12_381::Bls12_381;
-    use ark_ec::PairingEngine;
+    use ark_ec::pairing::Pairing;
     use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension};
     use ark_std::rand::RngCore;
+    use ark_std::test_rng;
     use ark_std::vec::Vec;
-    use ark_std::{test_rng, UniformRand};
     type E = Bls12_381;
-    type Fr = <E as PairingEngine>::Fr;
+    type Fr = <E as Pairing>::ScalarField;
 
     fn test_polynomial<R: RngCore>(
         uni_params: &UniversalParams<E>,
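
`MultilinearPC::setup` and `check` above keep the fixed-base windowed MSM strategy, but `FixedBase` now lives under `ark_ec::scalar_mul::fixed_base` and the results are flattened to affine with `CurveGroup::normalize_batch`. A sketch of the same call sequence on a toy input, assuming `Bls12_381` G1; the names `s` and `powers_of_g` are assumptions for the example:

    use ark_bls12_381::{Fr, G1Affine, G1Projective};
    use ark_ec::{scalar_mul::fixed_base::FixedBase, CurveGroup, Group};
    use ark_ff::PrimeField;
    use ark_std::UniformRand;

    fn main() {
        let mut rng = ark_std::test_rng();
        let g = G1Projective::generator();
        let s = Fr::rand(&mut rng);

        // Scalars 1, s, s^2, ..., s^7.
        let mut powers_of_s = Vec::with_capacity(8);
        let mut cur = Fr::from(1u64);
        for _ in 0..8 {
            powers_of_s.push(cur);
            cur *= s;
        }

        // Same call sequence as in `setup`, on a toy input.
        let scalar_bits = Fr::MODULUS_BIT_SIZE as usize;
        let window_size = FixedBase::get_mul_window_size(powers_of_s.len());
        let table = FixedBase::get_window_table(scalar_bits, window_size, g);
        let powers_of_g: Vec<G1Affine> = G1Projective::normalize_batch(
            &FixedBase::msm::<G1Projective>(scalar_bits, window_size, &table, &powers_of_s),
        );

        assert_eq!(powers_of_g[2], (g * (s * s)).into_affine());
    }
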
diff --git a/src/sonic_pc/data_structures.rs b/src/sonic_pc/data_structures.rs
index 07a9b77..708b558 100644
--- a/src/sonic_pc/data_structures.rs
+++ b/src/sonic_pc/data_structures.rs
@@ -2,8 +2,11 @@ use crate::kzg10;
 use crate::{
     BTreeMap, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, PCVerifierKey, Vec,
 };
-use ark_ec::{PairingEngine, ProjectiveCurve};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_ec::pairing::Pairing;
+use ark_ec::Group;
+use ark_serialize::{
+    CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate,
+};
 use ark_std::io::{Read, Write};
 
 /// `UniversalParams` are the universal parameters for the KZG10 scheme.
@@ -18,11 +21,11 @@ pub type Commitment<E> = kzg10::Commitment<E>;
 /// `PreparedCommitment` is the prepared commitment for the KZG10 scheme.
 pub type PreparedCommitment<E> = kzg10::PreparedCommitment<E>;
 
-impl<E: PairingEngine> PCPreparedCommitment<Commitment<E>> for PreparedCommitment<E> {
+impl<E: Pairing> PCPreparedCommitment<Commitment<E>> for PreparedCommitment<E> {
     /// prepare `PreparedCommitment` from `Commitment`
     fn prepare(comm: &Commitment<E>) -> Self {
         let mut prepared_comm = Vec::<E::G1Affine>::new();
-        let mut cur = E::G1Projective::from(comm.0.clone());
+        let mut cur = E::G1::from(comm.0.clone());
         for _ in 0..128 {
             prepared_comm.push(cur.clone().into());
             cur.double_in_place();
@@ -41,7 +44,7 @@ impl<E: PairingEngine> PCPreparedCommitment<Commitment<E>> for PreparedCommitmen
     Clone(bound = ""),
     Debug(bound = "")
 )]
-pub struct CommitterKey<E: PairingEngine> {
+pub struct CommitterKey<E: Pairing> {
     /// The key used to commit to polynomials.
     pub powers_of_g: Vec<E::G1Affine>,
 
@@ -65,7 +68,7 @@ pub struct CommitterKey<E: PairingEngine> {
     pub max_degree: usize,
 }
 
-impl<E: PairingEngine> CommitterKey<E> {
+impl<E: Pairing> CommitterKey<E> {
     /// Obtain powers for the underlying KZG10 construction
     pub fn powers(&self) -> kzg10::Powers<E> {
         kzg10::Powers {
@@ -111,7 +114,7 @@ impl<E: PairingEngine> CommitterKey<E> {
     }
 }
 
-impl<E: PairingEngine> PCCommitterKey for CommitterKey<E> {
+impl<E: Pairing> PCCommitterKey for CommitterKey<E> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -124,7 +127,7 @@ impl<E: PairingEngine> PCCommitterKey for CommitterKey<E> {
 /// `VerifierKey` is used to check evaluation proofs for a given commitment.
 #[derive(Derivative)]
 #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))]
-pub struct VerifierKey<E: PairingEngine> {
+pub struct VerifierKey<E: Pairing> {
     /// The generator of G1.
     pub g: E::G1Affine,
 
@@ -156,7 +159,7 @@ pub struct VerifierKey<E: PairingEngine> {
     pub max_degree: usize,
 }
 
-impl<E: PairingEngine> VerifierKey<E> {
+impl<E: Pairing> VerifierKey<E> {
     /// Find the appropriate shift for the degree bound.
     pub fn get_shift_power(&self, degree_bound: usize) -> Option<E::G2Prepared> {
         self.degree_bounds_and_neg_powers_of_h
@@ -169,102 +172,73 @@ impl<E: PairingEngine> VerifierKey<E> {
     }
 }
 
-impl<E: PairingEngine> CanonicalSerialize for VerifierKey<E> {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize(&mut writer)?;
-        self.gamma_g.serialize(&mut writer)?;
-        self.h.serialize(&mut writer)?;
-        self.beta_h.serialize(&mut writer)?;
-        self.degree_bounds_and_neg_powers_of_h
-            .serialize(&mut writer)?;
-        self.supported_degree.serialize(&mut writer)?;
-        self.max_degree.serialize(&mut writer)
-    }
-
-    fn serialized_size(&self) -> usize {
-        self.g.serialized_size()
-            + self.gamma_g.serialized_size()
-            + self.h.serialized_size()
-            + self.beta_h.serialized_size()
-            + self.degree_bounds_and_neg_powers_of_h.serialized_size()
-            + self.supported_degree.serialized_size()
-            + self.max_degree.serialized_size()
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize_uncompressed(&mut writer)?;
-        self.gamma_g.serialize_uncompressed(&mut writer)?;
-        self.h.serialize_uncompressed(&mut writer)?;
-        self.beta_h.serialize_uncompressed(&mut writer)?;
-        self.degree_bounds_and_neg_powers_of_h
-            .serialize_uncompressed(&mut writer)?;
-        self.supported_degree.serialize_uncompressed(&mut writer)?;
-        self.max_degree.serialize_uncompressed(&mut writer)
+impl<E: Pairing> Valid for VerifierKey<E> {
+    fn check(&self) -> Result<(), SerializationError> {
+        self.g.check()?;
+        self.gamma_g.check()?;
+        self.h.check()?;
+        self.beta_h.check()?;
+        self.degree_bounds_and_neg_powers_of_h.check()?;
+        if self.supported_degree > self.max_degree {
+            return Err(SerializationError::InvalidData);
+        }
+        Ok(())
     }
+}
 
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        self.g.serialize_unchecked(&mut writer)?;
-        self.gamma_g.serialize_unchecked(&mut writer)?;
-        self.h.serialize_unchecked(&mut writer)?;
-        self.beta_h.serialize_unchecked(&mut writer)?;
+impl<E: Pairing> CanonicalSerialize for VerifierKey<E> {
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
+        self.g.serialize_with_mode(&mut writer, compress)?;
+        self.gamma_g.serialize_with_mode(&mut writer, compress)?;
+        self.h.serialize_with_mode(&mut writer, compress)?;
+        self.beta_h.serialize_with_mode(&mut writer, compress)?;
         self.degree_bounds_and_neg_powers_of_h
-            .serialize_unchecked(&mut writer)?;
-        self.supported_degree.serialize_unchecked(&mut writer)?;
-        self.max_degree.serialize_unchecked(&mut writer)
+            .serialize_with_mode(&mut writer, compress)?;
+        self.supported_degree
+            .serialize_with_mode(&mut writer, compress)?;
+        self.max_degree.serialize_with_mode(&mut writer, compress)
     }
 
-    fn uncompressed_size(&self) -> usize {
-        self.g.uncompressed_size()
-            + self.gamma_g.uncompressed_size()
-            + self.h.uncompressed_size()
-            + self.beta_h.uncompressed_size()
-            + self.degree_bounds_and_neg_powers_of_h.uncompressed_size()
-            + self.supported_degree.uncompressed_size()
-            + self.max_degree.uncompressed_size()
+    fn serialized_size(&self, compress: Compress) -> usize {
+        self.g.serialized_size(compress)
+            + self.gamma_g.serialized_size(compress)
+            + self.h.serialized_size(compress)
+            + self.beta_h.serialized_size(compress)
+            + self
+                .degree_bounds_and_neg_powers_of_h
+                .serialized_size(compress)
+            + self.supported_degree.serialized_size(compress)
+            + self.max_degree.serialized_size(compress)
     }
 }
 
-impl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize(&mut reader)?;
-        let h = E::G2Affine::deserialize(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize(&mut reader)?;
-        let degree_bounds_and_neg_powers_of_h =
-            Option::<Vec<(usize, E::G2Affine)>>::deserialize(&mut reader)?;
-        let supported_degree = usize::deserialize(&mut reader)?;
-        let max_degree = usize::deserialize(&mut reader)?;
-
-        let prepared_h = E::G2Prepared::from(h.clone());
-        let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
-
-        Ok(Self {
-            g,
-            gamma_g,
-            h,
-            beta_h,
-            prepared_h,
-            prepared_beta_h,
-            degree_bounds_and_neg_powers_of_h,
-            supported_degree,
-            max_degree,
-        })
-    }
-
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?;
-        let h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?;
+impl<E: Pairing> CanonicalDeserialize for VerifierKey<E> {
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?;
         let degree_bounds_and_neg_powers_of_h =
-            Option::<Vec<(usize, E::G2Affine)>>::deserialize_uncompressed(&mut reader)?;
-        let supported_degree = usize::deserialize_uncompressed(&mut reader)?;
-        let max_degree = usize::deserialize_uncompressed(&mut reader)?;
+            Option::<Vec<(usize, E::G2Affine)>>::deserialize_with_mode(
+                &mut reader,
+                compress,
+                Validate::No,
+            )?;
+        let supported_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
+        let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
 
         let prepared_h = E::G2Prepared::from(h.clone());
         let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
 
-        Ok(Self {
+        let result = Self {
             g,
             gamma_g,
             h,
@@ -274,37 +248,17 @@ impl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {
             degree_bounds_and_neg_powers_of_h,
             supported_degree,
             max_degree,
-        })
-    }
-
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?;
-        let h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?;
-        let degree_bounds_and_neg_powers_of_h =
-            Option::<Vec<(usize, E::G2Affine)>>::deserialize_unchecked(&mut reader)?;
-        let supported_degree = usize::deserialize_unchecked(&mut reader)?;
-        let max_degree = usize::deserialize_unchecked(&mut reader)?;
+        };
 
-        let prepared_h = E::G2Prepared::from(h.clone());
-        let prepared_beta_h = E::G2Prepared::from(beta_h.clone());
+        if let Validate::Yes = validate {
+            result.check()?;
+        }
 
-        Ok(Self {
-            g,
-            gamma_g,
-            h,
-            beta_h,
-            prepared_h,
-            prepared_beta_h,
-            degree_bounds_and_neg_powers_of_h,
-            supported_degree,
-            max_degree,
-        })
+        Ok(result)
     }
 }
 
-impl<E: PairingEngine> PCVerifierKey for VerifierKey<E> {
+impl<E: Pairing> PCVerifierKey for VerifierKey<E> {
     fn max_degree(&self) -> usize {
         self.max_degree
     }
@@ -317,7 +271,7 @@ impl<E: PairingEngine> PCVerifierKey for VerifierKey<E> {
 /// Nothing to do to prepare this verifier key (for now).
 pub type PreparedVerifierKey<E> = VerifierKey<E>;
 
-impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifierKey<E> {
+impl<E: Pairing> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifierKey<E> {
     /// prepare `PreparedVerifierKey` from `VerifierKey`
     fn prepare(vk: &VerifierKey<E>) -> Self {
         vk.clone()
@@ -334,4 +288,4 @@ impl<E: PairingEngine> PCPreparedVerifierKey<VerifierKey<E>> for PreparedVerifie
     PartialEq(bound = ""),
     Eq(bound = "")
 )]
-pub struct BatchProof<E: PairingEngine>(pub(crate) Vec<kzg10::Proof<E>>);
+pub struct BatchProof<E: Pairing>(pub(crate) Vec<kzg10::Proof<E>>);
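
The rewritten `VerifierKey` impls above are an instance of the general `ark-serialize` 0.4 pattern: the three hand-written 0.3 variants (`serialize`, `serialize_uncompressed`, `serialize_unchecked`, and their deserializers) collapse into one `serialize_with_mode`/`deserialize_with_mode` pair parameterized by `Compress` and `Validate`, with structural checks moved into a `Valid` impl. A condensed sketch of that shape on a stand-in struct (`ToyKey` and its fields are illustrative only):

    use ark_serialize::{
        CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate,
    };
    use ark_std::io::{Read, Write};

    struct ToyKey {
        supported_degree: usize,
        max_degree: usize,
    }

    impl Valid for ToyKey {
        fn check(&self) -> Result<(), SerializationError> {
            // Semantic validation runs once, after all fields are read, when
            // the caller asks for `Validate::Yes`.
            if self.supported_degree > self.max_degree {
                return Err(SerializationError::InvalidData);
            }
            Ok(())
        }
    }

    impl CanonicalSerialize for ToyKey {
        fn serialize_with_mode<W: Write>(
            &self,
            mut writer: W,
            compress: Compress,
        ) -> Result<(), SerializationError> {
            self.supported_degree
                .serialize_with_mode(&mut writer, compress)?;
            self.max_degree.serialize_with_mode(&mut writer, compress)
        }

        fn serialized_size(&self, compress: Compress) -> usize {
            self.supported_degree.serialized_size(compress)
                + self.max_degree.serialized_size(compress)
        }
    }

    impl CanonicalDeserialize for ToyKey {
        fn deserialize_with_mode<R: Read>(
            mut reader: R,
            compress: Compress,
            validate: Validate,
        ) -> Result<Self, SerializationError> {
            // Read fields without per-field validation, then validate once.
            let supported_degree =
                usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
            let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?;
            let result = Self {
                supported_degree,
                max_degree,
            };
            if let Validate::Yes = validate {
                result.check()?;
            }
            Ok(result)
        }
    }

    fn main() {
        let key = ToyKey {
            supported_degree: 4,
            max_degree: 8,
        };
        let mut bytes = Vec::new();
        key.serialize_with_mode(&mut bytes, Compress::Yes).unwrap();
        let back = ToyKey::deserialize_with_mode(&bytes[..], Compress::Yes, Validate::Yes).unwrap();
        assert_eq!(back.supported_degree, 4);
    }
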
diff --git a/src/sonic_pc/mod.rs b/src/sonic_pc/mod.rs
index 7c48de7..2e65dae 100644
--- a/src/sonic_pc/mod.rs
+++ b/src/sonic_pc/mod.rs
@@ -3,15 +3,17 @@ use crate::{BTreeMap, BTreeSet, String, ToString, Vec};
 use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet};
 use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination};
 use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment};
+use ark_ec::AffineRepr;
+use ark_ec::CurveGroup;
 
-use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve};
+use ark_ec::pairing::Pairing;
 use ark_ff::{One, UniformRand, Zero};
 use ark_std::rand::RngCore;
 use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec};
 
 mod data_structures;
 use crate::challenge::ChallengeGenerator;
-use ark_sponge::CryptographicSponge;
+use ark_crypto_primitives::sponge::CryptographicSponge;
 pub use data_structures::*;
 
 /// Polynomial commitment based on [[KZG10]][kzg], with degree enforcement and
@@ -24,7 +26,7 @@ pub use data_structures::*;
 /// [sonic]: https://eprint.iacr.org/2019/099
 /// [al]: https://eprint.iacr.org/2019/601
 /// [marlin]: https://eprint.iacr.org/2019/1047
-pub struct SonicKZG10<E: PairingEngine, P: DenseUVPolynomial<E::Fr>, S: CryptographicSponge> {
+pub struct SonicKZG10<E: Pairing, P: DenseUVPolynomial<E::ScalarField>, S: CryptographicSponge> {
     _engine: PhantomData<E>,
     _poly: PhantomData<P>,
     _sponge: PhantomData<S>,
@@ -32,28 +34,28 @@ pub struct SonicKZG10<E: PairingEngine, P: DenseUVPolynomial<E::Fr>, S: Cryptogr
 
 impl<E, P, S> SonicKZG10<E, P, S>
 where
-    E: PairingEngine,
-    P: DenseUVPolynomial<E::Fr>,
+    E: Pairing,
+    P: DenseUVPolynomial<E::ScalarField>,
     S: CryptographicSponge,
 {
     fn accumulate_elems<'a>(
-        combined_comms: &mut BTreeMap<Option<usize>, E::G1Projective>,
-        combined_witness: &mut E::G1Projective,
-        combined_adjusted_witness: &mut E::G1Projective,
+        combined_comms: &mut BTreeMap<Option<usize>, E::G1>,
+        combined_witness: &mut E::G1,
+        combined_adjusted_witness: &mut E::G1,
         vk: &VerifierKey<E>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Commitment<E>>>,
         point: P::Point,
-        values: impl IntoIterator<Item = E::Fr>,
+        values: impl IntoIterator<Item = E::ScalarField>,
         proof: &kzg10::Proof<E>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
-        randomizer: Option<E::Fr>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
+        randomizer: Option<E::ScalarField>,
     ) {
         let acc_time = start_timer!(|| "Accumulating elements");
 
         let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE);
 
         // Keeps track of running combination of values
-        let mut combined_values = E::Fr::zero();
+        let mut combined_values = E::ScalarField::zero();
 
         // Iterates through all of the commitments and accumulates common degree_bound elements in a BTreeMap
         for (labeled_comm, value) in commitments.into_iter().zip(values) {
@@ -63,21 +65,19 @@ where
             let degree_bound = labeled_comm.degree_bound();
 
             // Applying opening challenge and randomness (used in batch_checking)
-            let mut comm_with_challenge: E::G1Projective = comm.0.mul(curr_challenge);
+            let mut comm_with_challenge: E::G1 = comm.0.mul(curr_challenge);
 
             if let Some(randomizer) = randomizer {
                 comm_with_challenge = comm_with_challenge.mul(&randomizer);
             }
 
             // Accumulate values in the BTreeMap
-            *combined_comms
-                .entry(degree_bound)
-                .or_insert(E::G1Projective::zero()) += &comm_with_challenge;
+            *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge;
             curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE);
         }
 
         // Push expected results into list of elems. Power will be the negative of the expected power
-        let mut witness: E::G1Projective = proof.w.into_projective();
+        let mut witness: E::G1 = proof.w.into_group();
         let mut adjusted_witness = vk.g.mul(combined_values) - &proof.w.mul(point);
         if let Some(random_v) = proof.random_v {
             adjusted_witness += &vk.gamma_g.mul(random_v);
@@ -94,13 +94,13 @@ where
     }
 
     fn check_elems(
-        combined_comms: BTreeMap<Option<usize>, E::G1Projective>,
-        combined_witness: E::G1Projective,
-        combined_adjusted_witness: E::G1Projective,
+        combined_comms: BTreeMap<Option<usize>, E::G1>,
+        combined_witness: E::G1,
+        combined_adjusted_witness: E::G1,
         vk: &VerifierKey<E>,
     ) -> Result<bool, Error> {
         let check_time = start_timer!(|| "Checking elems");
-        let mut g1_projective_elems: Vec<E::G1Projective> = Vec::new();
+        let mut g1_projective_elems: Vec<E::G1> = Vec::new();
         let mut g2_prepared_elems: Vec<E::G2Prepared> = Vec::new();
 
         for (degree_bound, comm) in combined_comms.into_iter() {
@@ -121,23 +121,24 @@ where
         g1_projective_elems.push(-combined_witness);
         g2_prepared_elems.push(vk.prepared_beta_h.clone());
 
-        let g1_prepared_elems_iter =
-            E::G1Projective::batch_normalization_into_affine(g1_projective_elems.as_slice())
+        let g1_prepared_elems_iter: Vec<E::G1Prepared> =
+            E::G1::normalize_batch(g1_projective_elems.as_slice())
                 .into_iter()
-                .map(|a| a.into());
+                .map(|a| a.into())
+                .collect::<Vec<_>>();
 
-        let g1_g2_prepared: Vec<(E::G1Prepared, E::G2Prepared)> =
-            g1_prepared_elems_iter.zip(g2_prepared_elems).collect();
-        let is_one: bool = E::product_of_pairings(g1_g2_prepared.iter()).is_one();
+        let is_one: bool = E::multi_pairing(g1_prepared_elems_iter, g2_prepared_elems)
+            .0
+            .is_one();
         end_timer!(check_time);
         Ok(is_one)
     }
 }
 
-impl<E, P, S> PolynomialCommitment<E::Fr, P, S> for SonicKZG10<E, P, S>
+impl<E, P, S> PolynomialCommitment<E::ScalarField, P, S> for SonicKZG10<E, P, S>
 where
-    E: PairingEngine,
-    P: DenseUVPolynomial<E::Fr, Point = E::Fr>,
+    E: Pairing,
+    P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
     S: CryptographicSponge,
     for<'a, 'b> &'a P: Div<&'b P, Output = P>,
 {
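
Editor's note: the hunk above captures the core pairing-check migration in 0.4: `E::product_of_pairings` is gone, projective points are converted in bulk with `E::G1::normalize_batch`, and the product check goes through `E::multi_pairing`, whose `PairingOutput` exposes the target-field value as `.0`. A minimal sketch of the same check in isolation (the helper name is illustrative, not part of this crate):

    use ark_ec::{pairing::Pairing, CurveGroup};
    use ark_ff::One;

    /// Returns true iff the product of pairings e(g1_i, g2_i) equals one.
    fn pairing_product_is_one<E: Pairing>(g1: Vec<E::G1>, g2: Vec<E::G2Prepared>) -> bool {
        // One batched inversion converts every projective point to affine.
        let g1_affine = E::G1::normalize_batch(&g1);
        // `multi_pairing` accepts anything convertible into prepared elements;
        // the target-field result sits in field `.0` of the PairingOutput wrapper.
        E::multi_pairing(g1_affine, g2).0.is_one()
    }
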
@@ -147,7 +148,7 @@ where
     type PreparedVerifierKey = PreparedVerifierKey<E>;
     type Commitment = Commitment<E>;
     type PreparedCommitment = PreparedCommitment<E>;
-    type Randomness = Randomness<E::Fr, P>;
+    type Randomness = Randomness<E::ScalarField, P>;
     type Proof = kzg10::Proof<E>;
     type BatchProof = Vec<Self::Proof>;
     type Error = Error;
@@ -277,7 +278,7 @@ where
     /// Outputs a commitment to `polynomial`.
     fn commit<'a>(
         ck: &Self::CommitterKey,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         rng: Option<&mut dyn RngCore>,
     ) -> Result<
         (
@@ -343,10 +344,10 @@ where
 
     fn open<'a>(
         ck: &Self::CommitterKey,
-        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         _commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         point: &'a P::Point,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         _rng: Option<&mut dyn RngCore>,
     ) -> Result<Self::Proof, Self::Error>
@@ -389,18 +390,18 @@ where
         vk: &Self::VerifierKey,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         point: &'a P::Point,
-        values: impl IntoIterator<Item = E::Fr>,
+        values: impl IntoIterator<Item = E::ScalarField>,
         proof: &Self::Proof,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         _rng: Option<&mut dyn RngCore>,
     ) -> Result<bool, Self::Error>
     where
         Self::Commitment: 'a,
     {
         let check_time = start_timer!(|| "Checking evaluations");
-        let mut combined_comms: BTreeMap<Option<usize>, E::G1Projective> = BTreeMap::new();
-        let mut combined_witness: E::G1Projective = E::G1Projective::zero();
-        let mut combined_adjusted_witness: E::G1Projective = E::G1Projective::zero();
+        let mut combined_comms: BTreeMap<Option<usize>, E::G1> = BTreeMap::new();
+        let mut combined_witness: E::G1 = E::G1::zero();
+        let mut combined_adjusted_witness: E::G1 = E::G1::zero();
 
         Self::accumulate_elems(
             &mut combined_comms,
@@ -429,9 +430,9 @@ where
         vk: &Self::VerifierKey,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        values: &Evaluations<E::Fr, P::Point>,
+        values: &Evaluations<E::ScalarField, P::Point>,
         proof: &Self::BatchProof,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Self::Error>
     where
@@ -449,11 +450,11 @@ where
 
         assert_eq!(proof.len(), query_to_labels_map.len());
 
-        let mut randomizer = E::Fr::one();
+        let mut randomizer = E::ScalarField::one();
 
-        let mut combined_comms: BTreeMap<Option<usize>, E::G1Projective> = BTreeMap::new();
-        let mut combined_witness: E::G1Projective = E::G1Projective::zero();
-        let mut combined_adjusted_witness: E::G1Projective = E::G1Projective::zero();
+        let mut combined_comms: BTreeMap<Option<usize>, E::G1> = BTreeMap::new();
+        let mut combined_witness: E::G1 = E::G1::zero();
+        let mut combined_adjusted_witness: E::G1 = E::G1::zero();
 
         for ((_point_label, (point, labels)), p) in query_to_labels_map.into_iter().zip(proof) {
             let mut comms_to_combine: Vec<&'_ LabeledCommitment<_>> = Vec::new();
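
Editor's note: in the batched check above, each query point's witness and adjusted witness is scaled by a fresh randomizer and folded into one accumulator, so a single pairing-product check covers all openings. A small sketch of that folding step (a hypothetical helper illustrating the standard random-linear-combination idea, not code from this crate):

    use ark_ec::pairing::Pairing;
    use ark_ff::Zero;

    /// Fold witnesses w_i into sum_i r_i * w_i for random scalars r_i.
    fn fold_witnesses<E: Pairing>(witnesses: &[E::G1], randomizers: &[E::ScalarField]) -> E::G1 {
        witnesses
            .iter()
            .zip(randomizers)
            .fold(E::G1::zero(), |acc, (w, r)| acc + (*w * r))
    }
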
@@ -499,14 +500,14 @@ where
 
     fn open_combinations<'a>(
         ck: &Self::CommitterKey,
-        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
-        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::Fr, P>>,
+        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
+        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<E::ScalarField, P>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         query_set: &QuerySet<P::Point>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rands: impl IntoIterator<Item = &'a Self::Randomness>,
         rng: Option<&mut dyn RngCore>,
-    ) -> Result<BatchLCProof<E::Fr, Self::BatchProof>, Self::Error>
+    ) -> Result<BatchLCProof<E::ScalarField, Self::BatchProof>, Self::Error>
     where
         Self::Randomness: 'a,
         Self::Commitment: 'a,
@@ -530,7 +531,7 @@ where
             let mut degree_bound = None;
             let mut hiding_bound = None;
             let mut randomness = Self::Randomness::empty();
-            let mut comm = E::G1Projective::zero();
+            let mut comm = E::G1::zero();
 
             let num_polys = lc.len();
             for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) {
@@ -566,11 +567,10 @@ where
             lc_info.push((lc_label, degree_bound));
         }
 
-        let comms: Vec<Self::Commitment> =
-            E::G1Projective::batch_normalization_into_affine(&lc_commitments)
-                .into_iter()
-                .map(|c| kzg10::Commitment::<E>(c))
-                .collect();
+        let comms: Vec<Self::Commitment> = E::G1::normalize_batch(&lc_commitments)
+            .into_iter()
+            .map(|c| kzg10::Commitment::<E>(c))
+            .collect();
 
         let lc_commitments = lc_info
             .into_iter()
@@ -594,12 +594,12 @@ where
     /// committed in `labeled_commitments`.
     fn check_combinations<'a, R: RngCore>(
         vk: &Self::VerifierKey,
-        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::Fr>>,
+        linear_combinations: impl IntoIterator<Item = &'a LinearCombination<E::ScalarField>>,
         commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
         eqn_query_set: &QuerySet<P::Point>,
-        eqn_evaluations: &Evaluations<P::Point, E::Fr>,
-        proof: &BatchLCProof<E::Fr, Self::BatchProof>,
-        opening_challenges: &mut ChallengeGenerator<E::Fr, S>,
+        eqn_evaluations: &Evaluations<P::Point, E::ScalarField>,
+        proof: &BatchLCProof<E::ScalarField, Self::BatchProof>,
+        opening_challenges: &mut ChallengeGenerator<E::ScalarField, S>,
         rng: &mut R,
     ) -> Result<bool, Self::Error>
     where
@@ -619,7 +619,7 @@ where
             let num_polys = lc.len();
 
             let mut degree_bound = None;
-            let mut combined_comm = E::G1Projective::zero();
+            let mut combined_comm = E::G1::zero();
 
             for (coeff, label) in lc.iter() {
                 if label.is_one() {
@@ -651,11 +651,10 @@ where
             lc_info.push((lc_label, degree_bound));
         }
 
-        let comms: Vec<Self::Commitment> =
-            E::G1Projective::batch_normalization_into_affine(&lc_commitments)
-                .into_iter()
-                .map(|c| kzg10::Commitment(c))
-                .collect();
+        let comms: Vec<Self::Commitment> = E::G1::normalize_batch(&lc_commitments)
+            .into_iter()
+            .map(|c| kzg10::Commitment(c))
+            .collect();
 
         let lc_commitments = lc_info
             .into_iter()
@@ -681,31 +680,31 @@ mod tests {
     use super::SonicKZG10;
     use ark_bls12_377::Bls12_377;
     use ark_bls12_381::Bls12_381;
-    use ark_ec::PairingEngine;
+    use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
+    use ark_ec::pairing::Pairing;
     use ark_ff::UniformRand;
     use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial};
-    use ark_sponge::poseidon::PoseidonSponge;
     use rand_chacha::ChaCha20Rng;
 
-    type UniPoly_381 = DensePoly<<Bls12_381 as PairingEngine>::Fr>;
-    type UniPoly_377 = DensePoly<<Bls12_377 as PairingEngine>::Fr>;
+    type UniPoly_381 = DensePoly<<Bls12_381 as Pairing>::ScalarField>;
+    type UniPoly_377 = DensePoly<<Bls12_377 as Pairing>::ScalarField>;
 
     type PC<E, P, S> = SonicKZG10<E, P, S>;
-    type Sponge_Bls12_377 = PoseidonSponge<<Bls12_377 as PairingEngine>::Fr>;
-    type Sponge_Bls12_381 = PoseidonSponge<<Bls12_381 as PairingEngine>::Fr>;
+    type Sponge_Bls12_377 = PoseidonSponge<<Bls12_377 as Pairing>::ScalarField>;
+    type Sponge_Bls12_381 = PoseidonSponge<<Bls12_381 as Pairing>::ScalarField>;
     type PC_Bls12_377 = PC<Bls12_377, UniPoly_377, Sponge_Bls12_377>;
     type PC_Bls12_381 = PC<Bls12_381, UniPoly_381, Sponge_Bls12_381>;
 
-    fn rand_poly<E: PairingEngine>(
+    fn rand_poly<E: Pairing>(
         degree: usize,
         _: Option<usize>,
         rng: &mut ChaCha20Rng,
-    ) -> DensePoly<E::Fr> {
-        DensePoly::<E::Fr>::rand(degree, rng)
+    ) -> DensePoly<E::ScalarField> {
+        DensePoly::<E::ScalarField>::rand(degree, rng)
     }
 
-    fn rand_point<E: PairingEngine>(_: Option<usize>, rng: &mut ChaCha20Rng) -> E::Fr {
-        E::Fr::rand(rng)
+    fn rand_point<E: Pairing>(_: Option<usize>, rng: &mut ChaCha20Rng) -> E::ScalarField {
+        E::ScalarField::rand(rng)
     }
 
     #[test]
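
Editor's note: in the migrated tests, the sponge now comes from `ark_crypto_primitives::sponge` and field access goes through `Pairing::ScalarField`. A small sketch of driving helpers like `rand_poly`/`rand_point` above with a seeded RNG (the seed and degree are arbitrary illustration values):

    use ark_bls12_381::Bls12_381;
    use ark_ec::pairing::Pairing;
    use ark_ff::UniformRand;
    use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial, Polynomial};
    use ark_std::rand::SeedableRng;
    use rand_chacha::ChaCha20Rng;

    fn main() {
        let mut rng = ChaCha20Rng::seed_from_u64(0);
        // Random degree-10 polynomial over the BLS12-381 scalar field.
        let poly = DensePoly::<<Bls12_381 as Pairing>::ScalarField>::rand(10, &mut rng);
        // Random evaluation point from the same field, and its evaluation.
        let point = <Bls12_381 as Pairing>::ScalarField::rand(&mut rng);
        let _value = poly.evaluate(&point);
    }
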
diff --git a/src/streaming_kzg/mod.rs b/src/streaming_kzg/mod.rs
index 4c5e838..e3bdb2a 100644
--- a/src/streaming_kzg/mod.rs
+++ b/src/streaming_kzg/mod.rs
@@ -86,8 +86,8 @@ mod data_structures;
 mod space;
 mod time;
 
-use ark_ec::ProjectiveCurve;
-use ark_serialize::CanonicalSerialize;
+use ark_ec::CurveGroup;
+use ark_serialize::{CanonicalSerialize, Compress};
 use ark_std::vec::Vec;
 pub use data_structures::*;
 pub use space::CommitterKeyStream;
@@ -103,40 +103,39 @@ use ark_std::ops::{Add, Mul};
 use ark_std::borrow::Borrow;
 use ark_std::fmt;
 
-use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine};
+use ark_ec::{pairing::Pairing, AffineRepr, VariableBaseMSM};
 
 /// A Kate polynomial commitment over a bilinear group, represented as a single \\(\GG_1\\) element.
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub struct Commitment<E: PairingEngine>(pub(crate) E::G1Affine);
+pub struct Commitment<E: Pairing>(pub(crate) E::G1Affine);
 
-impl<E: PairingEngine> Commitment<E> {
+impl<E: Pairing> Commitment<E> {
     /// Return the size of Commitment in bytes.
     pub fn size_in_bytes(&self) -> usize {
-        // ark_ff::to_bytes![E::G1Affine::zero()].unwrap().len() / 2
-        E::G1Affine::zero().serialized_size() / 2
+        E::G1Affine::zero().serialized_size(Compress::Yes)
     }
 }
 
 #[inline]
-fn msm<E: PairingEngine>(bases: &[E::G1Affine], scalars: &[E::Fr]) -> E::G1Affine {
+fn msm<E: Pairing>(bases: &[E::G1Affine], scalars: &[E::ScalarField]) -> E::G1Affine {
     let scalars = scalars.iter().map(|x| x.into_bigint()).collect::<Vec<_>>();
-    let sp = <E::G1Projective as VariableBaseMSM>::msm_bigint(bases, &scalars);
+    let sp = <E::G1 as VariableBaseMSM>::msm_bigint(bases, &scalars);
     sp.into_affine()
 }
 
 /// Polynomial evaluation proof, represented as a single \\(\GG_1\\) element.
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub struct EvaluationProof<E: PairingEngine>(pub E::G1Affine);
+pub struct EvaluationProof<E: Pairing>(pub E::G1Affine);
 
-impl<E: PairingEngine> Add for EvaluationProof<E> {
+impl<E: Pairing> Add for EvaluationProof<E> {
     type Output = Self;
 
     fn add(self, rhs: Self) -> Self::Output {
-        EvaluationProof(self.0 + rhs.0)
+        EvaluationProof((self.0 + rhs.0).into_affine())
     }
 }
 
-impl<E: PairingEngine> core::iter::Sum for EvaluationProof<E> {
+impl<E: Pairing> core::iter::Sum for EvaluationProof<E> {
     fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
         let zero = EvaluationProof(E::G1Affine::zero());
         iter.fold(zero, |x, y| x + y)
@@ -159,27 +158,27 @@ pub(crate) type VerificationResult = Result<(), VerificationError>;
 /// The verification key for the polynomial commitment scheme.
 /// It also implements verification functions for the evaluation proof.
 #[derive(Debug, PartialEq, Eq)]
-pub struct VerifierKey<E: PairingEngine> {
+pub struct VerifierKey<E: Pairing> {
     /// The generator of \\(\GG_1\\)
     powers_of_g: Vec<E::G1Affine>,
     /// The generator of \\(\GG_2\\), together with its multiplication by the trapdoor.
     powers_of_g2: Vec<E::G2Affine>,
 }
 
-impl<E: PairingEngine> VerifierKey<E> {
+impl<E: Pairing> VerifierKey<E> {
     /// The verification procedure for the EvaluationProof with a single polynomial evaluated at a single evaluation point.
     /// The polynomial is evaluated at the point ``alpha`` and committed as ``commitment``.
     /// The evaluation proof can be obtained either in a space-efficient or a time-efficient flavour.
     pub fn verify(
         &self,
         commitment: &Commitment<E>,
-        &alpha: &E::Fr,
-        evaluation: &E::Fr,
+        &alpha: &E::ScalarField,
+        evaluation: &E::ScalarField,
         proof: &EvaluationProof<E>,
     ) -> VerificationResult {
-        let scalars = [(-alpha).into_bigint(), E::Fr::one().into_bigint()];
-        let ep = <E::G2Projective as VariableBaseMSM>::msm_bigint(&self.powers_of_g2, &scalars);
-        let lhs = commitment.0.into_projective() - self.powers_of_g[0].mul(evaluation);
+        let scalars = [(-alpha).into_bigint(), E::ScalarField::one().into_bigint()];
+        let ep = <E::G2 as VariableBaseMSM>::msm_bigint(&self.powers_of_g2, &scalars);
+        let lhs = commitment.0.into_group() - self.powers_of_g[0].mul(evaluation);
         let g2 = self.powers_of_g2[0];
 
         if E::pairing(lhs, g2) == E::pairing(proof.0, ep) {
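
Editor's note: the branch above is the standard single-point KZG identity. With commitment \(C\), claimed value \(v\), base point \(G_1\) (`powers_of_g[0]`), and \(\mathrm{ep} = (\tau - \alpha)\,G_2\) obtained from the two-term MSM over `powers_of_g2`, the verifier accepts iff

    \( e(C - v\,G_1,\ G_2) = e(\pi,\ (\tau - \alpha)\,G_2), \)

which is exactly `E::pairing(lhs, g2) == E::pairing(proof.0, ep)`.
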
@@ -198,21 +197,20 @@ impl<E: PairingEngine> VerifierKey<E> {
     pub fn verify_multi_points(
         &self,
         commitments: &[Commitment<E>],
-        eval_points: &[E::Fr],
-        evaluations: &[Vec<E::Fr>],
+        eval_points: &[E::ScalarField],
+        evaluations: &[Vec<E::ScalarField>],
         proof: &EvaluationProof<E>,
-        open_chal: &E::Fr,
+        open_chal: &E::ScalarField,
     ) -> VerificationResult {
         // Computing the vanishing polynomial over eval_points
         let zeros = vanishing_polynomial(eval_points);
         let zeros_repr = zeros.iter().map(|x| x.into_bigint()).collect::<Vec<_>>();
-        let zeros =
-            <E::G2Projective as VariableBaseMSM>::msm_bigint(&self.powers_of_g2, &zeros_repr);
+        let zeros = <E::G2 as VariableBaseMSM>::msm_bigint(&self.powers_of_g2, &zeros_repr);
 
         // Computing the inverse for the interpolation
         let mut sca_inverse = Vec::new();
         for (j, x_j) in eval_points.iter().enumerate() {
-            let mut sca = E::Fr::one();
+            let mut sca = E::ScalarField::one();
             for (k, x_k) in eval_points.iter().enumerate() {
                 if j == k {
                     continue;
@@ -226,12 +224,13 @@ impl<E: PairingEngine> VerifierKey<E> {
         // Computing the lagrange polynomial for the interpolation
         let mut lang = Vec::new();
         for (j, _x_j) in eval_points.iter().enumerate() {
-            let mut l_poly = DensePolynomial::from_coefficients_vec(vec![E::Fr::one()]);
+            let mut l_poly = DensePolynomial::from_coefficients_vec(vec![E::ScalarField::one()]);
             for (k, x_k) in eval_points.iter().enumerate() {
                 if j == k {
                     continue;
                 }
-                let tmp_poly = DensePolynomial::from_coefficients_vec(vec![-(*x_k), E::Fr::one()]);
+                let tmp_poly =
+                    DensePolynomial::from_coefficients_vec(vec![-(*x_k), E::ScalarField::one()]);
                 l_poly = l_poly.mul(&tmp_poly);
             }
             lang.push(l_poly);
@@ -250,11 +249,11 @@ impl<E: PairingEngine> VerifierKey<E> {
         // Gathering commitments
         let comm_vec = commitments.iter().map(|x| x.0).collect::<Vec<_>>();
         let etas_repr = etas.iter().map(|e| e.into_bigint()).collect::<Vec<_>>();
-        let f_comm = <E::G1Projective as VariableBaseMSM>::msm_bigint(&comm_vec, &etas_repr);
+        let f_comm = <E::G1 as VariableBaseMSM>::msm_bigint(&comm_vec, &etas_repr);
 
         let g2 = self.powers_of_g2[0];
 
-        if E::pairing(f_comm - i_comm.into_projective(), g2) == E::pairing(proof.0, zeros) {
+        if E::pairing(f_comm - i_comm.into_group(), g2) == E::pairing(proof.0, zeros) {
             Ok(())
         } else {
             Err(VerificationError)
@@ -262,13 +261,13 @@ impl<E: PairingEngine> VerifierKey<E> {
     }
 }
 
-fn interpolate_poly<E: PairingEngine>(
-    eval_points: &[E::Fr],
-    evals: &[E::Fr],
-    sca_inverse: &[E::Fr],
-    lang: &[DensePolynomial<E::Fr>],
-) -> DensePolynomial<E::Fr> {
-    let mut res = DensePolynomial::from_coefficients_vec(vec![E::Fr::zero()]);
+fn interpolate_poly<E: Pairing>(
+    eval_points: &[E::ScalarField],
+    evals: &[E::ScalarField],
+    sca_inverse: &[E::ScalarField],
+    lang: &[DensePolynomial<E::ScalarField>],
+) -> DensePolynomial<E::ScalarField> {
+    let mut res = DensePolynomial::from_coefficients_vec(vec![E::ScalarField::zero()]);
     for (j, (_x_j, y_j)) in eval_points.iter().zip(evals.iter()).enumerate() {
         let l_poly = lang[j].mul(sca_inverse[j] * y_j);
         res = (&res).add(&l_poly);
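
Editor's note: beyond the `Fr` to `ScalarField` renames, this file picks up three 0.4 conventions: adding two affine points now yields a `CurveGroup` element (hence the `into_affine()` in `EvaluationProof::add`), `into_projective()` becomes `into_group()`, and serialized sizes are queried per `Compress` mode. A minimal sketch on a concrete curve, chosen only for illustration:

    use ark_bls12_381::{G1Affine, G1Projective};
    use ark_ec::{AffineRepr, CurveGroup};
    use ark_serialize::{CanonicalSerialize, Compress};

    fn main() {
        let g = G1Affine::generator();
        // Affine + affine produces a projective (CurveGroup) element...
        let sum: G1Projective = g + g;
        // ...so converting back requires an explicit normalization.
        let _sum_affine: G1Affine = sum.into_affine();
        // `into_projective()` is now `into_group()`.
        let _as_group: G1Projective = g.into_group();
        // Serialized sizes are queried per compression mode.
        assert_eq!(g.serialized_size(Compress::Yes), g.compressed_size());
    }
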
diff --git a/src/streaming_kzg/space.rs b/src/streaming_kzg/space.rs
index 4fd4d13..e8ab2d4 100644
--- a/src/streaming_kzg/space.rs
+++ b/src/streaming_kzg/space.rs
@@ -1,5 +1,5 @@
 //! Space-efficient implementation of the polynomial commitment of Kate et al.
-use ark_ec::{PairingEngine, ProjectiveCurve};
+use ark_ec::{pairing::Pairing, CurveGroup};
 use ark_ff::{PrimeField, Zero};
 use ark_poly::Polynomial;
 use ark_std::borrow::Borrow;
@@ -7,7 +7,7 @@ use ark_std::collections::VecDeque;
 use ark_std::vec::Vec;
 
 use crate::streaming_kzg::{ceil_div, vanishing_polynomial, FoldedPolynomialTree};
-use ark_ec::msm::{ChunkedPippenger, HashMapPippenger, VariableBaseMSM};
+use ark_ec::scalar_mul::variable_base::{ChunkedPippenger, HashMapPippenger, VariableBaseMSM};
 use ark_std::iterable::{Iterable, Reverse};
 
 use super::{time::CommitterKey, VerifierKey};
@@ -20,7 +20,7 @@ const LENGTH_MISMATCH_MSG: &str = "Expecting at least one element in the committ
 #[derive(Clone)]
 pub struct CommitterKeyStream<E, SG>
 where
-    E: PairingEngine,
+    E: Pairing,
     SG: Iterable,
     SG::Item: Borrow<E::G1Affine>,
 {
@@ -32,7 +32,7 @@ where
 
 impl<E, SG> CommitterKeyStream<E, SG>
 where
-    E: PairingEngine,
+    E: Pairing,
     SG: Iterable,
     SG::Item: Borrow<E::G1Affine>,
 {
@@ -57,14 +57,14 @@ where
     pub fn open<SF>(
         &self,
         polynomial: &SF,
-        alpha: &E::Fr,
+        alpha: &E::ScalarField,
         max_msm_buffer: usize,
-    ) -> (E::Fr, EvaluationProof<E>)
+    ) -> (E::ScalarField, EvaluationProof<E>)
     where
         SF: Iterable,
-        SF::Item: Borrow<E::Fr>,
+        SF::Item: Borrow<E::ScalarField>,
     {
-        let mut quotient = ChunkedPippenger::new(max_msm_buffer);
+        let mut quotient: ChunkedPippenger<E::G1> = ChunkedPippenger::new(max_msm_buffer);
 
         let bases_init = self.powers_of_g.iter();
         let scalars = polynomial.iter();
@@ -74,7 +74,7 @@ where
         // See <https://github.com/rust-lang/rust/issues/77404>
         let bases = bases_init.skip(self.powers_of_g.len() - polynomial.len());
 
-        let mut previous = E::Fr::zero();
+        let mut previous = E::ScalarField::zero();
         for (scalar, base) in scalars.zip(bases) {
             quotient.add(base, previous.into_bigint());
             let coefficient = previous * alpha + scalar.borrow();
@@ -90,21 +90,21 @@ where
     pub fn open_multi_points<SF>(
         &self,
         polynomial: &SF,
-        points: &[E::Fr],
+        points: &[E::ScalarField],
         max_msm_buffer: usize,
-    ) -> (Vec<E::Fr>, EvaluationProof<E>)
+    ) -> (Vec<E::ScalarField>, EvaluationProof<E>)
     where
         SF: Iterable,
-        SF::Item: Borrow<E::Fr>,
+        SF::Item: Borrow<E::ScalarField>,
     {
         let zeros = vanishing_polynomial(points);
-        let mut quotient = ChunkedPippenger::new(max_msm_buffer);
+        let mut quotient: ChunkedPippenger<E::G1> = ChunkedPippenger::new(max_msm_buffer);
         let bases_init = self.powers_of_g.iter();
         // TODO: change `skip` to `advance_by` once rust-lang/rust#7774 is fixed.
         // See <https://github.com/rust-lang/rust/issues/77404>
         let mut bases = bases_init.skip(self.powers_of_g.len() - polynomial.len() + zeros.degree());
 
-        let mut state = VecDeque::<E::Fr>::with_capacity(points.len());
+        let mut state = VecDeque::<E::ScalarField>::with_capacity(points.len());
 
         let mut polynomial_iterator = polynomial.iter();
 
@@ -131,13 +131,12 @@ where
     pub fn commit<SF: ?Sized>(&self, polynomial: &SF) -> Commitment<E>
     where
         SF: Iterable,
-        SF::Item: Borrow<E::Fr>,
+        SF::Item: Borrow<E::ScalarField>,
     {
         assert!(self.powers_of_g.len() >= polynomial.len());
 
         Commitment(
-            <E::G1Projective as VariableBaseMSM>::msm_chunks(&self.powers_of_g, polynomial)
-                .into_affine(),
+            <E::G1 as VariableBaseMSM>::msm_chunks(&self.powers_of_g, polynomial).into_affine(),
         )
     }
 
@@ -147,7 +146,7 @@ where
         polynomials: &[&'a dyn Iterable<Item = F, Iter = &mut dyn Iterator<Item = F>>],
     ) -> Vec<Commitment<E>>
     where
-        F: Borrow<E::Fr>,
+        F: Borrow<E::ScalarField>,
     {
         polynomials.iter().map(|&p| self.commit(p)).collect()
     }
@@ -157,18 +156,19 @@ where
     /// The function takes as input a committer key and the tree structure of all the folding polynomials, and produces the desired commitment for each polynomial.
     pub fn commit_folding<SF>(
         &self,
-        polynomials: &FoldedPolynomialTree<E::Fr, SF>,
+        polynomials: &FoldedPolynomialTree<E::ScalarField, SF>,
         max_msm_buffer: usize,
     ) -> Vec<Commitment<E>>
     where
         SF: Iterable,
-        SF::Item: Borrow<E::Fr>,
+        SF::Item: Borrow<E::ScalarField>,
     {
         let n = polynomials.depth();
-        let mut pippengers: Vec<ChunkedPippenger<E::G1Affine>> = Vec::new();
+        let mut pippengers: Vec<ChunkedPippenger<E::G1>> = Vec::new();
         let mut folded_bases = Vec::new();
         for i in 1..n + 1 {
-            let pippenger = ChunkedPippenger::with_size(max_msm_buffer / n);
+            let pippenger: ChunkedPippenger<<E as Pairing>::G1> =
+                ChunkedPippenger::with_size(max_msm_buffer / n);
             let bases_init = self.powers_of_g.iter();
 
             let delta = self.powers_of_g.len() - ceil_div(polynomials.len(), 1 << i);
@@ -196,20 +196,20 @@ where
     /// `etas` are the random challenges for batching the folding polynomials.
     pub fn open_folding<'a, SF>(
         &self,
-        polynomials: FoldedPolynomialTree<'a, E::Fr, SF>,
-        points: &[E::Fr],
-        etas: &[E::Fr],
+        polynomials: FoldedPolynomialTree<'a, E::ScalarField, SF>,
+        points: &[E::ScalarField],
+        etas: &[E::ScalarField],
         max_msm_buffer: usize,
-    ) -> (Vec<Vec<E::Fr>>, EvaluationProof<E>)
+    ) -> (Vec<Vec<E::ScalarField>>, EvaluationProof<E>)
     where
         SG: Iterable,
         SF: Iterable,
-        E: PairingEngine,
+        E: Pairing,
         SG::Item: Borrow<E::G1Affine>,
-        SF::Item: Borrow<E::Fr> + Copy,
+        SF::Item: Borrow<E::ScalarField> + Copy,
     {
         let n = polynomials.depth();
-        let mut pippenger = HashMapPippenger::<E::G1Affine>::new(max_msm_buffer);
+        let mut pippenger = HashMapPippenger::<E::G1>::new(max_msm_buffer);
         let mut folded_bases = Vec::new();
         let zeros = vanishing_polynomial(points);
         let mut remainders = vec![VecDeque::new(); n];
@@ -222,7 +222,7 @@ where
             let bases = bases_init.skip(delta);
 
             (0..points.len()).for_each(|_| {
-                remainders[i - 1].push_back(E::Fr::zero());
+                remainders[i - 1].push_back(E::ScalarField::zero());
             });
 
             folded_bases.push(bases);
@@ -255,7 +255,7 @@ where
     }
 }
 
-impl<'a, E: PairingEngine> From<&'a CommitterKey<E>>
+impl<'a, E: Pairing> From<&'a CommitterKey<E>>
     for CommitterKeyStream<E, Reverse<&'a [E::G1Affine]>>
 {
     fn from(ck: &'a CommitterKey<E>) -> Self {
@@ -268,7 +268,7 @@ impl<'a, E: PairingEngine> From<&'a CommitterKey<E>>
 
 impl<E, SG> From<&CommitterKeyStream<E, SG>> for VerifierKey<E>
 where
-    E: PairingEngine,
+    E: Pairing,
     SG: Iterable,
     SG::Item: Borrow<E::G1Affine>,
 {
diff --git a/src/streaming_kzg/time.rs b/src/streaming_kzg/time.rs
index 251a0e9..8c7fa2f 100644
--- a/src/streaming_kzg/time.rs
+++ b/src/streaming_kzg/time.rs
@@ -1,15 +1,11 @@
 //! An implementation of a time-efficient version of Kate et al.'s polynomial commitment,
 //! with optimization from [\[BDFG20\]](https://eprint.iacr.org/2020/081.pdf).
-use ark_ec::msm::FixedBase;
-use ark_ec::PairingEngine;
-use ark_ec::ProjectiveCurve;
+use ark_ec::pairing::Pairing;
+use ark_ec::scalar_mul::fixed_base::FixedBase;
+use ark_ec::CurveGroup;
 use ark_ff::{PrimeField, Zero};
 use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
-use ark_std::borrow::Borrow;
-use ark_std::ops::{Div, Mul};
-use ark_std::rand::RngCore;
-use ark_std::vec::Vec;
-use ark_std::UniformRand;
+use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, vec::Vec, UniformRand};
 
 use crate::streaming_kzg::{
     linear_combination, msm, powers, Commitment, EvaluationProof, VerifierKey,
@@ -23,12 +19,12 @@ use super::vanishing_polynomial;
 /// plus the `max_eval_degree` powers over \\(\GG_2\\),
 /// where `max_degree` is the max polynomial degree to commit to,
 /// and `max_eval_degree` is the max number of different points to open simultaneously.
-pub struct CommitterKey<E: PairingEngine> {
+pub struct CommitterKey<E: Pairing> {
     pub(crate) powers_of_g: Vec<E::G1Affine>,
     pub(crate) powers_of_g2: Vec<E::G2Affine>,
 }
 
-impl<E: PairingEngine> From<&CommitterKey<E>> for VerifierKey<E> {
+impl<E: Pairing> From<&CommitterKey<E>> for VerifierKey<E> {
     fn from(ck: &CommitterKey<E>) -> VerifierKey<E> {
         let max_eval_points = ck.max_eval_points();
         let powers_of_g2 = ck.powers_of_g2[..max_eval_points + 1].to_vec();
@@ -41,7 +37,7 @@ impl<E: PairingEngine> From<&CommitterKey<E>> for VerifierKey<E> {
     }
 }
 
-impl<E: PairingEngine> CommitterKey<E> {
+impl<E: Pairing> CommitterKey<E> {
     /// The setup algorithm for the commitment scheme.
     ///
     /// Given a degree bound `max_degree`,
@@ -50,17 +46,17 @@ impl<E: PairingEngine> CommitterKey<E> {
     /// construct the committer key.
     pub fn new(max_degree: usize, max_eval_points: usize, rng: &mut impl RngCore) -> Self {
         // Compute the consecutive powers of an element.
-        let tau = E::Fr::rand(rng);
+        let tau = E::ScalarField::rand(rng);
         let powers_of_tau = powers(tau, max_degree + 1);
 
-        let g = E::G1Projective::rand(rng);
+        let g = E::G1::rand(rng);
         let window_size = FixedBase::get_mul_window_size(max_degree + 1);
-        let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize;
+        let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize;
         let g_table = FixedBase::get_window_table(scalar_bits, window_size, g);
         let powers_of_g_proj = FixedBase::msm(scalar_bits, window_size, &g_table, &powers_of_tau);
-        let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g_proj);
+        let powers_of_g = E::G1::normalize_batch(&powers_of_g_proj);
 
-        let g2 = E::G2Projective::rand(rng).into_affine();
+        let g2 = E::G2::rand(rng).into_affine();
         let powers_of_g2 = powers_of_tau
             .iter()
             .take(max_eval_points + 1)
@@ -80,20 +76,20 @@ impl<E: PairingEngine> CommitterKey<E> {
     }
 
     /// Given a polynomial `polynomial` of degree less than `max_degree`, return a commitment to `polynomial`.
-    pub fn commit(&self, polynomial: &[E::Fr]) -> Commitment<E> {
+    pub fn commit(&self, polynomial: &[E::ScalarField]) -> Commitment<E> {
         Commitment(msm::<E>(&self.powers_of_g, polynomial))
     }
 
     /// Obtain a new preprocessed committer key defined by the indices `indices`.
     pub fn index_by(&self, indices: &[usize]) -> Self {
-        let mut indexed_powers_of_g = vec![E::G1Affine::zero(); self.powers_of_g.len()];
+        let mut indexed_powers_of_g = vec![E::G1::zero(); self.powers_of_g.len()];
         indices
             .iter()
             .zip(self.powers_of_g.iter())
             .for_each(|(&i, &g)| indexed_powers_of_g[i] = indexed_powers_of_g[i] + g);
         Self {
             powers_of_g2: self.powers_of_g2.clone(),
-            powers_of_g: indexed_powers_of_g,
+            powers_of_g: E::G1::normalize_batch(indexed_powers_of_g.as_slice()),
         }
     }
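
Editor's note: `index_by` now accumulates into projective `E::G1` slots and converts the whole vector back to affine with a single `normalize_batch` call, paying for the field inversions once instead of per addition. The pattern in isolation, on a curve chosen only for illustration:

    use ark_bls12_381::{G1Affine, G1Projective};
    use ark_ec::{AffineRepr, CurveGroup};
    use ark_ff::Zero;

    fn main() {
        let g = G1Affine::generator();
        // Accumulate in projective coordinates: no inversion per addition.
        let mut acc = vec![G1Projective::zero(); 3];
        for (slot, point) in [0usize, 1, 1].into_iter().zip([g, g, g]) {
            acc[slot] = acc[slot] + point;
        }
        // Normalize once over the whole vector.
        let affine: Vec<G1Affine> = G1Projective::normalize_batch(&acc);
        assert_eq!(affine.len(), 3);
    }
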
 
@@ -101,7 +97,7 @@ impl<E: PairingEngine> CommitterKey<E> {
     pub fn batch_commit<J>(&self, polynomials: J) -> Vec<Commitment<E>>
     where
         J: IntoIterator,
-        J::Item: Borrow<Vec<E::Fr>>,
+        J::Item: Borrow<Vec<E::ScalarField>>,
     {
         polynomials
             .into_iter()
@@ -114,19 +110,21 @@ impl<E: PairingEngine> CommitterKey<E> {
     /// together with an evaluation proof.
     pub fn open(
         &self,
-        polynomial: &[E::Fr],
-        evalualtion_point: &E::Fr,
-    ) -> (E::Fr, EvaluationProof<E>) {
+        polynomial: &[E::ScalarField],
+        evalualtion_point: &E::ScalarField,
+    ) -> (E::ScalarField, EvaluationProof<E>) {
         let mut quotient = Vec::new();
 
-        let mut previous = E::Fr::zero();
+        let mut previous = E::ScalarField::zero();
         for &c in polynomial.iter().rev() {
             let coefficient = c + previous * evalualtion_point;
             quotient.insert(0, coefficient);
             previous = coefficient;
         }
 
-        let (&evaluation, quotient) = quotient.split_first().unwrap_or((&E::Fr::zero(), &[]));
+        let (&evaluation, quotient) = quotient
+            .split_first()
+            .unwrap_or((&E::ScalarField::zero(), &[]));
         let evaluation_proof = msm::<E>(&self.powers_of_g, quotient);
         (evaluation, EvaluationProof(evaluation_proof))
     }
@@ -134,8 +132,8 @@ impl<E: PairingEngine> CommitterKey<E> {
     /// Evaluate a single polynomial at a set of points `eval_points`, and provide a single evaluation proof.
     pub fn open_multi_points(
         &self,
-        polynomial: &[E::Fr],
-        eval_points: &[E::Fr],
+        polynomial: &[E::ScalarField],
+        eval_points: &[E::ScalarField],
     ) -> EvaluationProof<E> {
         // Computing the vanishing polynomial over eval_points
         let z_poly = vanishing_polynomial(eval_points);
@@ -149,14 +147,14 @@ impl<E: PairingEngine> CommitterKey<E> {
     /// `eval_chal` is the random challenge for batching evaluation proofs across different polynomials.
     pub fn batch_open_multi_points(
         &self,
-        polynomials: &[&Vec<E::Fr>],
-        eval_points: &[E::Fr],
-        eval_chal: &E::Fr,
+        polynomials: &[&Vec<E::ScalarField>],
+        eval_points: &[E::ScalarField],
+        eval_chal: &E::ScalarField,
     ) -> EvaluationProof<E> {
         assert!(eval_points.len() < self.powers_of_g2.len());
         let etas = powers(*eval_chal, polynomials.len());
         let batched_polynomial =
-            linear_combination(polynomials, &etas).unwrap_or_else(|| vec![E::Fr::zero()]);
+            linear_combination(polynomials, &etas).unwrap_or_else(|| vec![E::ScalarField::zero()]);
         self.open_multi_points(&batched_polynomial, eval_points)
     }
 }
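
Editor's note: taken together, the migrated time-efficient key can be exercised end to end roughly as below. This is a hedged sketch, assuming `CommitterKey` and `VerifierKey` are re-exported from the crate's `streaming_kzg` module (as `CommitterKeyStream` is); the degree bound, polynomial length, and curve are arbitrary illustration choices.

    use ark_bls12_381::{Bls12_381, Fr};
    use ark_ff::UniformRand;
    use ark_poly_commit::streaming_kzg::{CommitterKey, VerifierKey};
    use ark_std::test_rng;

    fn main() {
        let mut rng = test_rng();
        // Supports degree <= 17 and single-point openings.
        let ck = CommitterKey::<Bls12_381>::new(17, 1, &mut rng);
        let vk = VerifierKey::from(&ck);

        let polynomial: Vec<Fr> = (0..8).map(|_| Fr::rand(&mut rng)).collect();
        let commitment = ck.commit(&polynomial);

        let alpha = Fr::rand(&mut rng);
        let (evaluation, proof) = ck.open(&polynomial, &alpha);
        assert!(vk.verify(&commitment, &alpha, &evaluation, &proof).is_ok());
    }
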
-- 
GitLab