use ark_serialize::{
    CanonicalDeserialize, CanonicalDeserializeWithFlags, CanonicalSerialize,
    CanonicalSerializeWithFlags, Compress, EmptyFlags, Flags, SerializationError, Valid, Validate,
};
use ark_std::{
    cmp::{Ord, Ordering, PartialOrd},
    fmt,
    io::{Read, Write},
    iter::Chain,
    ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
    vec::Vec,
};

use num_traits::{One, Zero};
use zeroize::Zeroize;

use ark_std::rand::{
    distributions::{Distribution, Standard},
    Rng,
};

use crate::{
    biginteger::BigInteger,
    fields::{Field, LegendreSymbol, PrimeField},
    SqrtPrecomputation, ToConstraintField, UniformRand,
};

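/// Trait that specifies the constants and helper methods needed to construct a
/// quadratic extension of `Self::BaseField`, i.e. the quotient ring
/// `BaseField[X] / (X^2 - NONRESIDUE)`.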
pub trait QuadExtConfig: 'static + Send + Sync + Sized {
    /// The prime field that this quadratic extension is ultimately an extension of.
    type BasePrimeField: PrimeField;
    /// The base field over which this quadratic extension is constructed.
    type BaseField: Field<BasePrimeField = Self::BasePrimeField>;
    /// The type of the coefficients used for an efficient implementation of the
    /// Frobenius endomorphism.
    type FrobCoeff: Field;

    /// The degree of the extension over the base prime field.
    const DEGREE_OVER_BASE_PRIME_FIELD: usize;

    /// The quadratic non-residue used to construct the extension.
    const NONRESIDUE: Self::BaseField;

    /// Coefficients for the Frobenius automorphism.
    const FROBENIUS_COEFF_C1: &'static [Self::FrobCoeff];

    /// Multiplies a base-field element by [`Self::NONRESIDUE`] in place.
    /// Implementors may specialize this when the non-residue admits a cheaper
    /// multiplication than a full base-field multiplication.
    #[inline(always)]
    fn mul_base_field_by_nonresidue_in_place(fe: &mut Self::BaseField) -> &mut Self::BaseField {
        *fe *= &Self::NONRESIDUE;
        fe
    }

    /// Sets `y = NONRESIDUE * y + x`.
    #[inline(always)]
    fn mul_base_field_by_nonresidue_and_add(y: &mut Self::BaseField, x: &Self::BaseField) {
        Self::mul_base_field_by_nonresidue_in_place(y);
        *y += x;
    }

    /// Sets `y = (NONRESIDUE + 1) * y + x`.
    #[inline(always)]
    fn mul_base_field_by_nonresidue_plus_one_and_add(y: &mut Self::BaseField, x: &Self::BaseField) {
        let old_y = *y;
        Self::mul_base_field_by_nonresidue_and_add(y, x);
        *y += old_y;
    }

    /// Sets `y = x - NONRESIDUE * y`.
    #[inline(always)]
    fn sub_and_mul_base_field_by_nonresidue(y: &mut Self::BaseField, x: &Self::BaseField) {
        Self::mul_base_field_by_nonresidue_in_place(y);
        let mut result = *x;
        result -= &*y;
        *y = result;
    }

    /// Multiplies a base-field element by the Frobenius coefficient for the given `power`.
    fn mul_base_field_by_frob_coeff(fe: &mut Self::BaseField, power: usize);
}

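/// An element of a quadratic extension field, represented as `c0 + c1 * X`
/// where `X^2 = P::NONRESIDUE`.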
#[derive(Derivative)]
#[derivative(
    Default(bound = "P: QuadExtConfig"),
    Hash(bound = "P: QuadExtConfig"),
    Clone(bound = "P: QuadExtConfig"),
    Copy(bound = "P: QuadExtConfig"),
    Debug(bound = "P: QuadExtConfig"),
    PartialEq(bound = "P: QuadExtConfig"),
    Eq(bound = "P: QuadExtConfig")
)]
pub struct QuadExtField<P: QuadExtConfig> {
    /// Coefficient `c0` of the quadratic extension element.
    pub c0: P::BaseField,
    /// Coefficient `c1` of the quadratic extension element.
    pub c1: P::BaseField,
}

impl<P: QuadExtConfig> QuadExtField<P> {
    /// Creates the element `c0 + c1 * X` from its coefficients.
    pub const fn new(c0: P::BaseField, c1: P::BaseField) -> Self {
        Self { c0, c1 }
    }

    /// Maps `c0 + c1 * X` to its conjugate `c0 - c1 * X` in place.
    pub fn conjugate_in_place(&mut self) -> &mut Self {
        self.c1 = -self.c1;
        self
    }

    /// Returns the norm `c0^2 - NONRESIDUE * c1^2`, i.e. the product of the
    /// element with its conjugate. The result lies in the base field.
    pub fn norm(&self) -> P::BaseField {
        let mut result = self.c1.square();
        P::sub_and_mul_base_field_by_nonresidue(&mut result, &self.c0.square());
        result
    }

    /// Multiplies both coefficients by a base-field element in place.
    pub fn mul_assign_by_basefield(&mut self, element: &P::BaseField) {
        self.c0 *= element;
        self.c1 *= element;
    }
}

impl<P: QuadExtConfig> Zero for QuadExtField<P> {
    fn zero() -> Self {
        QuadExtField::new(P::BaseField::zero(), P::BaseField::zero())
    }

    fn is_zero(&self) -> bool {
        self.c0.is_zero() && self.c1.is_zero()
    }
}

impl<P: QuadExtConfig> One for QuadExtField<P> {
    fn one() -> Self {
        QuadExtField::new(P::BaseField::one(), P::BaseField::zero())
    }

    fn is_one(&self) -> bool {
        self.c0.is_one() && self.c1.is_zero()
    }
}

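// Iterator over the base-prime-field coefficients of `P::BaseField`; a
// `QuadExtField` element chains two of these, one per coefficient.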
type BaseFieldIter<P> = <<P as QuadExtConfig>::BaseField as Field>::BasePrimeFieldIter;
impl<P: QuadExtConfig> Field for QuadExtField<P> {
    type BasePrimeField = P::BasePrimeField;

    type BasePrimeFieldIter = Chain<BaseFieldIter<P>, BaseFieldIter<P>>;

    const SQRT_PRECOMP: Option<SqrtPrecomputation<Self>> = None;

    const ZERO: Self = Self::new(P::BaseField::ZERO, P::BaseField::ZERO);
    const ONE: Self = Self::new(P::BaseField::ONE, P::BaseField::ZERO);

    fn extension_degree() -> u64 {
        2 * P::BaseField::extension_degree()
    }

    fn from_base_prime_field(elem: Self::BasePrimeField) -> Self {
        let fe = P::BaseField::from_base_prime_field(elem);
        Self::new(fe, P::BaseField::ZERO)
    }

    fn to_base_prime_field_elements(&self) -> Self::BasePrimeFieldIter {
        self.c0
            .to_base_prime_field_elements()
            .chain(self.c1.to_base_prime_field_elements())
    }

    fn from_base_prime_field_elems(elems: &[Self::BasePrimeField]) -> Option<Self> {
        if elems.len() != (Self::extension_degree() as usize) {
            return None;
        }
        let base_ext_deg = P::BaseField::extension_degree() as usize;
        Some(Self::new(
            P::BaseField::from_base_prime_field_elems(&elems[0..base_ext_deg]).unwrap(),
            P::BaseField::from_base_prime_field_elems(&elems[base_ext_deg..]).unwrap(),
        ))
    }

    fn double(&self) -> Self {
        let mut result = *self;
        result.double_in_place();
        result
    }

    fn double_in_place(&mut self) -> &mut Self {
        self.c0.double_in_place();
        self.c1.double_in_place();
        self
    }

    fn neg_in_place(&mut self) -> &mut Self {
        self.c0.neg_in_place();
        self.c1.neg_in_place();
        self
    }

    fn square(&self) -> Self {
        let mut result = *self;
        result.square_in_place();
        result
    }

    #[inline]
    fn from_random_bytes_with_flags<F: Flags>(bytes: &[u8]) -> Option<(Self, F)> {
        let split_at = bytes.len() / 2;
        if let Some(c0) = P::BaseField::from_random_bytes(&bytes[..split_at]) {
            if let Some((c1, flags)) =
                P::BaseField::from_random_bytes_with_flags(&bytes[split_at..])
            {
                return Some((QuadExtField::new(c0, c1), flags));
            }
        }
        None
    }

    #[inline]
    fn from_random_bytes(bytes: &[u8]) -> Option<Self> {
        Self::from_random_bytes_with_flags::<EmptyFlags>(bytes).map(|f| f.0)
    }

    fn square_in_place(&mut self) -> &mut Self {
        // (c0 + c1 * X)^2 = (c0^2 + NONRESIDUE * c1^2) + (2 * c0 * c1) * X.
        if P::NONRESIDUE == -P::BaseField::ONE {
            // When NONRESIDUE == -1, the constant term is c0^2 - c1^2 = (c0 + c1)(c0 - c1).
            let c0_copy = self.c0;
            let mut v0 = self.c0;
            v0 -= &self.c1;
            self.c0 += self.c1;
            // c0 = (c0 + c1) * (c0 - c1)
            self.c0 *= &v0;

            // c1 = 2 * c0 * c1
            self.c1.double_in_place();
            self.c1 *= &c0_copy;

            self
        } else {
            // v0 = c0 - c1
            let mut v0 = self.c0 - &self.c1;
            // v3 = c0 - NONRESIDUE * c1
            let mut v3 = self.c1;
            P::sub_and_mul_base_field_by_nonresidue(&mut v3, &self.c0);
            // v2 = c0 * c1
            let v2 = self.c0 * &self.c1;

            // v0 = (c0 - c1) * (c0 - NONRESIDUE * c1)
            v0 *= &v3;

            // c1 = 2 * c0 * c1
            self.c1 = v2;
            self.c1.double_in_place();
            // c0 = v0 + (NONRESIDUE + 1) * v2 = c0^2 + NONRESIDUE * c1^2
            self.c0 = v2;
            P::mul_base_field_by_nonresidue_plus_one_and_add(&mut self.c0, &v0);

            self
        }
    }

    fn inverse(&self) -> Option<Self> {
        if self.is_zero() {
            None
        } else {
            // The inverse of c0 + c1 * X is (c0 - c1 * X) / (c0^2 - NONRESIDUE * c1^2).
            // v0 = c0^2 - NONRESIDUE * c1^2 (the norm of `self`)
            let v1 = self.c1.square();
            let mut v0 = v1;
            P::sub_and_mul_base_field_by_nonresidue(&mut v0, &self.c0.square());

            v0.inverse().map(|v1| {
                let c0 = self.c0 * &v1;
                let c1 = -(self.c1 * &v1);
                Self::new(c0, c1)
            })
        }
    }

    fn inverse_in_place(&mut self) -> Option<&mut Self> {
        if let Some(inverse) = self.inverse() {
            *self = inverse;
            Some(self)
        } else {
            None
        }
    }

    fn frobenius_map_in_place(&mut self, power: usize) {
        self.c0.frobenius_map_in_place(power);
        self.c1.frobenius_map_in_place(power);
        P::mul_base_field_by_frob_coeff(&mut self.c1, power);
    }

    fn legendre(&self) -> LegendreSymbol {
        // An element of the extension is a square iff its norm is a square in
        // the base field, so the Legendre symbol of the norm is returned.
        self.norm().legendre()
    }

    fn sqrt(&self) -> Option<Self> {
        // If c1 is zero the element lies in the base field: either c0 is a
        // square there, or c0 / NONRESIDUE is (and the root has the form c1' * X).
        if self.c1.is_zero() {
            if self.c0.legendre().is_qr() {
                return self.c0.sqrt().map(|c0| Self::new(c0, P::BaseField::ZERO));
            } else {
                return (self.c0.div(P::NONRESIDUE))
                    .sqrt()
                    .map(|res| Self::new(P::BaseField::ZERO, res));
            }
        }
        // General case: from alpha = sqrt(norm(self)) and
        // delta = (alpha + c0) / 2 (replaced by delta - alpha if that is a
        // non-residue), a candidate root is c0' + c1' * X with c0' = sqrt(delta)
        // and c1' = c1 / (2 * c0').
        let alpha = self.norm();

        // 1/2 in the base prime field, computed as (MODULUS + 1) / 2.
        let mut two_inv = P::BasePrimeField::MODULUS;

        two_inv.add_with_carry(&1u64.into());
        two_inv.div2();

        let two_inv = P::BasePrimeField::from(two_inv);
        let two_inv = P::BaseField::from_base_prime_field(two_inv);

        alpha.sqrt().and_then(|alpha| {
            let mut delta = (alpha + &self.c0) * &two_inv;
            if delta.legendre().is_qnr() {
                delta -= &alpha;
            }
            let c0 = delta.sqrt().expect("Delta must have a square root");
            let c0_inv = c0.inverse().expect("c0 must have an inverse");
            let sqrt_cand = Self::new(c0, self.c1 * &two_inv * &c0_inv);
            if sqrt_cand.square() == *self {
                Some(sqrt_cand)
            } else {
                #[cfg(debug_assertions)]
                {
                    use crate::fields::LegendreSymbol::*;
                    if self.legendre() != QuadraticNonResidue {
                        panic!(
                            "Input has a square root per its legendre symbol, but it was not found"
                        )
                    }
                }
                None
            }
        })
    }

    fn sqrt_in_place(&mut self) -> Option<&mut Self> {
        (*self).sqrt().map(|sqrt| {
            *self = sqrt;
            self
        })
    }
}

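/// `QuadExtField` elements are ordered lexicographically, with `c1` as the
/// more significant coordinate and `c0` breaking ties.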
impl<P: QuadExtConfig> Ord for QuadExtField<P> {
    #[inline(always)]
    fn cmp(&self, other: &Self) -> Ordering {
        match self.c1.cmp(&other.c1) {
            Ordering::Greater => Ordering::Greater,
            Ordering::Less => Ordering::Less,
            Ordering::Equal => self.c0.cmp(&other.c0),
        }
    }
}

impl<P: QuadExtConfig> PartialOrd for QuadExtField<P> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<P: QuadExtConfig> Zeroize for QuadExtField<P> {
    fn zeroize(&mut self) {
        self.c0.zeroize();
        self.c1.zeroize();
    }
}

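// Conversions from primitive integers (and `bool`) embed the value into the
// `c0` coordinate, with `c1` set to zero; signed values are negated as needed.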
impl<P: QuadExtConfig> From<u128> for QuadExtField<P> {
    fn from(other: u128) -> Self {
        Self::new(other.into(), P::BaseField::ZERO)
    }
}

impl<P: QuadExtConfig> From<i128> for QuadExtField<P> {
    #[inline]
    fn from(val: i128) -> Self {
        let abs = Self::from(val.unsigned_abs());
        if val.is_positive() {
            abs
        } else {
            -abs
        }
    }
}

impl<P: QuadExtConfig> From<u64> for QuadExtField<P> {
    fn from(other: u64) -> Self {
        Self::new(other.into(), P::BaseField::ZERO)
    }
}

impl<P: QuadExtConfig> From<i64> for QuadExtField<P> {
    #[inline]
    fn from(val: i64) -> Self {
        let abs = Self::from(val.unsigned_abs());
        if val.is_positive() {
            abs
        } else {
            -abs
        }
    }
}

impl<P: QuadExtConfig> From<u32> for QuadExtField<P> {
    fn from(other: u32) -> Self {
        Self::new(other.into(), P::BaseField::ZERO)
    }
}

impl<P: QuadExtConfig> From<i32> for QuadExtField<P> {
    #[inline]
    fn from(val: i32) -> Self {
        let abs = Self::from(val.unsigned_abs());
        if val.is_positive() {
            abs
        } else {
            -abs
        }
    }
}

impl<P: QuadExtConfig> From<u16> for QuadExtField<P> {
    fn from(other: u16) -> Self {
        Self::new(other.into(), P::BaseField::ZERO)
    }
}

impl<P: QuadExtConfig> From<i16> for QuadExtField<P> {
    #[inline]
    fn from(val: i16) -> Self {
        let abs = Self::from(val.unsigned_abs());
        if val.is_positive() {
            abs
        } else {
            -abs
        }
    }
}

impl<P: QuadExtConfig> From<u8> for QuadExtField<P> {
    fn from(other: u8) -> Self {
        Self::new(other.into(), P::BaseField::ZERO)
    }
}

impl<P: QuadExtConfig> From<i8> for QuadExtField<P> {
    #[inline]
    fn from(val: i8) -> Self {
        let abs = Self::from(val.unsigned_abs());
        if val.is_positive() {
            abs
        } else {
            -abs
        }
    }
}

impl<P: QuadExtConfig> From<bool> for QuadExtField<P> {
    fn from(other: bool) -> Self {
        Self::new(u8::from(other).into(), P::BaseField::ZERO)
    }
}

impl<P: QuadExtConfig> Neg for QuadExtField<P> {
    type Output = Self;
    #[inline]
    #[must_use]
    fn neg(mut self) -> Self {
        self.c0.neg_in_place();
        self.c1.neg_in_place();
        self
    }
}

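// Uniform sampling draws each coordinate independently from the base field.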
impl<P: QuadExtConfig> Distribution<QuadExtField<P>> for Standard {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> QuadExtField<P> {
        QuadExtField::new(UniformRand::rand(rng), UniformRand::rand(rng))
    }
}

impl<'a, P: QuadExtConfig> Add<&'a QuadExtField<P>> for QuadExtField<P> {
    type Output = Self;

    #[inline]
    fn add(mut self, other: &Self) -> Self {
        self += other;
        self
    }
}

impl<'a, P: QuadExtConfig> Sub<&'a QuadExtField<P>> for QuadExtField<P> {
    type Output = Self;

    #[inline(always)]
    fn sub(mut self, other: &Self) -> Self {
        self -= other;
        self
    }
}

impl<'a, P: QuadExtConfig> Mul<&'a QuadExtField<P>> for QuadExtField<P> {
    type Output = Self;

    #[inline(always)]
    fn mul(mut self, other: &Self) -> Self {
        self *= other;
        self
    }
}

impl<'a, P: QuadExtConfig> Div<&'a QuadExtField<P>> for QuadExtField<P> {
    type Output = Self;

    // Division multiplies by the inverse; panics if `other` is zero.
    #[inline]
    fn div(mut self, other: &Self) -> Self {
        self.mul_assign(&other.inverse().unwrap());
        self
    }
}

impl<'a, P: QuadExtConfig> AddAssign<&'a Self> for QuadExtField<P> {
    #[inline]
    fn add_assign(&mut self, other: &Self) {
        self.c0 += &other.c0;
        self.c1 += &other.c1;
    }
}

impl<'a, P: QuadExtConfig> SubAssign<&'a Self> for QuadExtField<P> {
    #[inline]
    fn sub_assign(&mut self, other: &Self) {
        self.c0 -= &other.c0;
        self.c1 -= &other.c1;
    }
}

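// These macros derive the remaining by-value and mixed-ownership arithmetic
// impls from the by-reference impls above.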
impl_additive_ops_from_ref!(QuadExtField, QuadExtConfig);
impl_multiplicative_ops_from_ref!(QuadExtField, QuadExtConfig);

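// Multiplication: when the extension degree over the prime field is exactly 2,
// both output coefficients are computed as sums of products; otherwise
// Karatsuba multiplication over the base field is used.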
impl<'a, P: QuadExtConfig> MulAssign<&'a Self> for QuadExtField<P> {
    #[inline]
    fn mul_assign(&mut self, other: &Self) {
        if Self::extension_degree() == 2 {
            let c1_input = [self.c0, self.c1];
            P::mul_base_field_by_nonresidue_in_place(&mut self.c1);
            *self = Self::new(
                P::BaseField::sum_of_products(&[self.c0, self.c1], &[other.c0, other.c1]),
                P::BaseField::sum_of_products(&c1_input, &[other.c1, other.c0]),
            )
        } else {
            // Karatsuba multiplication:
            // v0 = A.c0 * B.c0, v1 = A.c1 * B.c1
            let mut v0 = self.c0;
            v0 *= &other.c0;
            let mut v1 = self.c1;
            v1 *= &other.c1;

            // result.c1 = (A.c0 + A.c1) * (B.c0 + B.c1) - v0 - v1
            self.c1 += &self.c0;
            self.c1 *= &(other.c0 + &other.c1);
            self.c1 -= &v0;
            self.c1 -= &v1;
            // result.c0 = v0 + NONRESIDUE * v1
            self.c0 = v1;
            P::mul_base_field_by_nonresidue_and_add(&mut self.c0, &v0);
        }
    }
}

impl<'a, P: QuadExtConfig> DivAssign<&'a Self> for QuadExtField<P> {
    #[inline]
    fn div_assign(&mut self, other: &Self) {
        self.mul_assign(&other.inverse().unwrap());
    }
}

impl<P: QuadExtConfig> fmt::Display for QuadExtField<P> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "QuadExtField({} + {} * u)", self.c0, self.c1)
    }
}

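// Serialization is coefficient-wise: `c0` is written first, then `c1`, which
// carries any serialization flags.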
impl<P: QuadExtConfig> CanonicalSerializeWithFlags for QuadExtField<P> {
    #[inline]
    fn serialize_with_flags<W: Write, F: Flags>(
        &self,
        mut writer: W,
        flags: F,
    ) -> Result<(), SerializationError> {
        self.c0.serialize_compressed(&mut writer)?;
        self.c1.serialize_with_flags(&mut writer, flags)?;
        Ok(())
    }

    #[inline]
    fn serialized_size_with_flags<F: Flags>(&self) -> usize {
        self.c0.compressed_size() + self.c1.serialized_size_with_flags::<F>()
    }
}

impl<P: QuadExtConfig> CanonicalSerialize for QuadExtField<P> {
    #[inline]
    fn serialize_with_mode<W: Write>(
        &self,
        writer: W,
        _compress: Compress,
    ) -> Result<(), SerializationError> {
        self.serialize_with_flags(writer, EmptyFlags)
    }

    #[inline]
    fn serialized_size(&self, _compress: Compress) -> usize {
        self.serialized_size_with_flags::<EmptyFlags>()
    }
}

impl<P: QuadExtConfig> CanonicalDeserializeWithFlags for QuadExtField<P> {
    #[inline]
    fn deserialize_with_flags<R: Read, F: Flags>(
        mut reader: R,
    ) -> Result<(Self, F), SerializationError> {
        let c0 = CanonicalDeserialize::deserialize_compressed(&mut reader)?;
        let (c1, flags) = CanonicalDeserializeWithFlags::deserialize_with_flags(&mut reader)?;
        Ok((QuadExtField::new(c0, c1), flags))
    }
}

impl<P: QuadExtConfig> Valid for QuadExtField<P> {
    fn check(&self) -> Result<(), SerializationError> {
        self.c0.check()?;
        self.c1.check()
    }
}

impl<P: QuadExtConfig> CanonicalDeserialize for QuadExtField<P> {
    #[inline]
    fn deserialize_with_mode<R: Read>(
        mut reader: R,
        compress: Compress,
        validate: Validate,
    ) -> Result<Self, SerializationError> {
        let c0: P::BaseField =
            CanonicalDeserialize::deserialize_with_mode(&mut reader, compress, validate)?;
        let c1: P::BaseField =
            CanonicalDeserialize::deserialize_with_mode(&mut reader, compress, validate)?;
        Ok(QuadExtField::new(c0, c1))
    }
}

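// Flattens the element into its base-prime-field coefficients, `c0`'s first.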
impl<P: QuadExtConfig> ToConstraintField<P::BasePrimeField> for QuadExtField<P>
where
    P::BaseField: ToConstraintField<P::BasePrimeField>,
{
    fn to_field_elements(&self) -> Option<Vec<P::BasePrimeField>> {
        let mut res = Vec::new();
        let mut c0_elems = self.c0.to_field_elements()?;
        let mut c1_elems = self.c1.to_field_elements()?;

        res.append(&mut c0_elems);
        res.append(&mut c1_elems);

        Some(res)
    }
}

#[cfg(test)]
mod quad_ext_tests {
    use super::*;
    use ark_std::test_rng;
    use ark_test_curves::{
        bls12_381::{Fq, Fq2},
        Field,
    };

    #[test]
    fn test_from_base_prime_field_elements() {
        let ext_degree = Fq2::extension_degree() as usize;
        // Slices whose length differs from the extension degree must be rejected.
        let max_num_elems_to_test = 4;
        for d in 0..max_num_elems_to_test {
            if d == ext_degree {
                continue;
            }
            let mut random_coeffs = Vec::<Fq>::new();
            for _ in 0..d {
                random_coeffs.push(Fq::rand(&mut test_rng()));
            }
            let res = Fq2::from_base_prime_field_elems(&random_coeffs);
            assert_eq!(res, None);
        }
        // Slices of the correct length must reconstruct the expected element.
        let number_of_tests = 10;
        for _ in 0..number_of_tests {
            let mut random_coeffs = Vec::<Fq>::new();
            for _ in 0..ext_degree {
                random_coeffs.push(Fq::rand(&mut test_rng()));
            }
            let actual = Fq2::from_base_prime_field_elems(&random_coeffs).unwrap();
            let expected = Fq2::new(random_coeffs[0], random_coeffs[1]);
            assert_eq!(actual, expected);
        }
    }

    #[test]
    fn test_from_base_prime_field_element() {
        let ext_degree = Fq2::extension_degree() as usize;
        let max_num_elems_to_test = 10;
        for _ in 0..max_num_elems_to_test {
            let mut random_coeffs = vec![Fq::zero(); ext_degree];
            let random_coeff = Fq::rand(&mut test_rng());
            let res = Fq2::from_base_prime_field(random_coeff);
            random_coeffs[0] = random_coeff;
            assert_eq!(
                res,
                Fq2::from_base_prime_field_elems(&random_coeffs).unwrap()
            );
        }
    }
}